diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index e702c97248cdc..944230377d077 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -56,7 +56,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["7.17.22", "8.13.4", "8.14.0", "8.15.0"] + BWC_VERSION: ["7.17.22", "8.13.5", "8.14.0", "8.15.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index d013780b1fd0a..5ac361c810627 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -529,8 +529,8 @@ steps: env: BWC_VERSION: 8.12.2 - - label: "{{matrix.image}} / 8.13.4 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.13.4 + - label: "{{matrix.image}} / 8.13.5 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.13.5 timeout_in_minutes: 300 matrix: setup: @@ -543,7 +543,7 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.13.4 + BWC_VERSION: 8.13.5 - label: "{{matrix.image}} / 8.14.0 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.14.0 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 7e9f6872b9146..7c5f683cf9692 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -591,8 +591,8 @@ steps: - signal_reason: agent_stop limit: 3 - - label: 8.13.4 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.13.4#bwcTest + - label: 8.13.5 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.13.5#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -601,7 +601,7 @@ steps: buildDirectory: /dev/shm/bk 
preemptible: true env: - BWC_VERSION: 8.13.4 + BWC_VERSION: 8.13.5 retry: automatic: - exit_status: "-1" @@ -714,7 +714,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk17 - BWC_VERSION: ["7.17.22", "8.13.4", "8.14.0", "8.15.0"] + BWC_VERSION: ["7.17.22", "8.13.5", "8.14.0", "8.15.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -760,7 +760,7 @@ steps: - openjdk17 - openjdk21 - openjdk22 - BWC_VERSION: ["7.17.22", "8.13.4", "8.14.0", "8.15.0"] + BWC_VERSION: ["7.17.22", "8.13.5", "8.14.0", "8.15.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.ci/bwcVersions b/.ci/bwcVersions index 77e51005d5ace..b9afdcf23b858 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -30,6 +30,6 @@ BWC_VERSION: - "8.10.4" - "8.11.4" - "8.12.2" - - "8.13.4" + - "8.13.5" - "8.14.0" - "8.15.0" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index 49f3708ce4af9..213e4e93bc81d 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,5 +1,5 @@ BWC_VERSION: - "7.17.22" - - "8.13.4" + - "8.13.5" - "8.14.0" - "8.15.0" diff --git a/distribution/tools/cli-launcher/src/main/java/org/elasticsearch/launcher/CliToolLauncher.java b/distribution/tools/cli-launcher/src/main/java/org/elasticsearch/launcher/CliToolLauncher.java index 4fd2512f2cbbe..981033aeccd8c 100644 --- a/distribution/tools/cli-launcher/src/main/java/org/elasticsearch/launcher/CliToolLauncher.java +++ b/distribution/tools/cli-launcher/src/main/java/org/elasticsearch/launcher/CliToolLauncher.java @@ -91,7 +91,7 @@ static Thread createShutdownHook(Terminal terminal, Closeable closeable) { try { closeable.close(); } catch (final IOException e) { - e.printStackTrace(terminal.getErrorWriter()); + terminal.errorPrintln(e); } terminal.flush(); // make sure to flush whatever the close or error might have written }, "elasticsearch-cli-shutdown"); diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ErrorPumpThread.java 
b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ErrorPumpThread.java index a6eb32cb1bb38..94c7653a08e0e 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ErrorPumpThread.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ErrorPumpThread.java @@ -9,12 +9,14 @@ package org.elasticsearch.server.cli; import org.elasticsearch.bootstrap.BootstrapInfo; +import org.elasticsearch.cli.Terminal; +import org.elasticsearch.cli.Terminal.Verbosity; import java.io.BufferedReader; +import java.io.Closeable; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; -import java.io.PrintWriter; import java.nio.charset.StandardCharsets; import java.util.List; import java.util.concurrent.CountDownLatch; @@ -29,9 +31,9 @@ * {@link BootstrapInfo#SERVER_READY_MARKER} signals the server is ready and the cli may * detach if daemonizing. All other messages are passed through to stderr. */ -class ErrorPumpThread extends Thread { +class ErrorPumpThread extends Thread implements Closeable { private final BufferedReader reader; - private final PrintWriter writer; + private final Terminal terminal; // a latch which changes state when the server is ready or has had a bootstrap error private final CountDownLatch readyOrDead = new CountDownLatch(1); @@ -42,10 +44,24 @@ class ErrorPumpThread extends Thread { // an unexpected io failure that occurred while pumping stderr private volatile IOException ioFailure; - ErrorPumpThread(PrintWriter errOutput, InputStream errInput) { + ErrorPumpThread(Terminal terminal, InputStream errInput) { super("server-cli[stderr_pump]"); this.reader = new BufferedReader(new InputStreamReader(errInput, StandardCharsets.UTF_8)); - this.writer = errOutput; + this.terminal = terminal; + } + + private void checkForIoFailure() throws IOException { + IOException failure = ioFailure; + ioFailure = null; + if (failure != null) { + throw failure; + } + } + 
+ @Override + public void close() throws IOException { + assert isAlive() == false : "Pump thread must be drained first"; + checkForIoFailure(); } /** @@ -56,9 +72,7 @@ class ErrorPumpThread extends Thread { */ boolean waitUntilReady() throws IOException { nonInterruptibleVoid(readyOrDead::await); - if (ioFailure != null) { - throw ioFailure; - } + checkForIoFailure(); return ready; } @@ -81,13 +95,13 @@ public void run() { ready = true; readyOrDead.countDown(); } else if (filter.contains(line) == false) { - writer.println(line); + terminal.errorPrintln(Verbosity.SILENT, line, false); } } } catch (IOException e) { ioFailure = e; } finally { - writer.flush(); + terminal.flush(); readyOrDead.countDown(); } } diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/KeystorePasswordTerminal.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/KeystorePasswordTerminal.java index bf03acaf7a5da..0fddf76caff59 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/KeystorePasswordTerminal.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/KeystorePasswordTerminal.java @@ -23,7 +23,7 @@ class KeystorePasswordTerminal extends Terminal implements Closeable { private final SecureString password; KeystorePasswordTerminal(Terminal delegate, SecureString password) { - super(delegate.getReader(), delegate.getWriter(), delegate.getErrorWriter()); + super(delegate); this.delegate = delegate; this.password = password; setVerbosity(delegate.getVerbosity()); diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java index 0505ab86127cf..7b904d4cb5a89 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java +++ 
b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java @@ -27,6 +27,7 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.monitor.jvm.JvmInfo; +import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.Arrays; @@ -231,7 +232,7 @@ private ServerArgs createArgs(OptionSet options, Environment env, SecureSettings } @Override - public void close() { + public void close() throws IOException { if (server != null) { server.stop(); } diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java index fa948572e7675..35b5d93b39933 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java @@ -61,16 +61,21 @@ public long pid() { */ public synchronized void detach() throws IOException { errorPump.drain(); - IOUtils.close(jvmProcess.getOutputStream(), jvmProcess.getInputStream(), jvmProcess.getErrorStream()); - detached = true; + try { + IOUtils.close(jvmProcess.getOutputStream(), jvmProcess.getInputStream(), jvmProcess.getErrorStream(), errorPump); + } finally { + detached = true; + } } /** * Waits for the subprocess to exit. */ - public int waitFor() { + public int waitFor() throws IOException { errorPump.drain(); - return nonInterruptible(jvmProcess::waitFor); + int exitCode = nonInterruptible(jvmProcess::waitFor); + errorPump.close(); + return exitCode; } /** @@ -81,7 +86,7 @@ public int waitFor() { * *

Note that if {@link #detach()} has been called, this method is a no-op. */ - public synchronized void stop() { + public synchronized void stop() throws IOException { if (detached) { return; } @@ -93,7 +98,7 @@ public synchronized void stop() { /** * Stop the subprocess, sending a SIGKILL. */ - public void forceStop() { + public void forceStop() throws IOException { assert detached == false; jvmProcess.destroyForcibly(); waitFor(); diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcessBuilder.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcessBuilder.java index b90ac25f5d57d..fcc290ebe9e72 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcessBuilder.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcessBuilder.java @@ -154,7 +154,7 @@ ServerProcess start(ProcessStarter processStarter) throws UserException { boolean success = false; try { jvmProcess = createProcess(getCommand(), getJvmArgs(), jvmOptions, getEnvironment(), processStarter); - errorPump = new ErrorPumpThread(terminal.getErrorWriter(), jvmProcess.getErrorStream()); + errorPump = new ErrorPumpThread(terminal, jvmProcess.getErrorStream()); errorPump.start(); sendArgs(serverArgs, jvmProcess.getOutputStream()); diff --git a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerCliTests.java b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerCliTests.java index e469764590bd6..38a64a778fc27 100644 --- a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerCliTests.java +++ b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerCliTests.java @@ -33,6 +33,7 @@ import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; +import java.util.List; import java.util.Locale; import java.util.Optional; import 
java.util.concurrent.atomic.AtomicBoolean; @@ -43,8 +44,11 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.emptyString; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.matchesRegex; import static org.hamcrest.Matchers.not; public class ServerCliTests extends CommandTestCase { @@ -321,11 +325,16 @@ protected ServerProcess startServer(Terminal terminal, ProcessInfo processInfo, throw new InterruptedException("interrupted while get jvm options"); } }; - var e = expectThrows( - InterruptedException.class, - () -> command.main(new String[0], terminal, new ProcessInfo(sysprops, envVars, esHomeDir)) - ); - assertThat(e.getMessage(), equalTo("interrupted while get jvm options")); + + int exitCode = command.main(new String[0], terminal, new ProcessInfo(sysprops, envVars, esHomeDir)); + assertThat(exitCode, is(ExitCodes.CODE_ERROR)); + + String[] lines = terminal.getErrorOutput().split(System.lineSeparator()); + assertThat(List.of(lines), hasSize(greaterThan(10))); // at least decent sized stacktrace + assertThat(lines[0], is("java.lang.InterruptedException: interrupted while get jvm options")); + assertThat(lines[1], matchesRegex("\\tat org.elasticsearch.server.cli.ServerCliTests.+startServer\\(ServerCliTests.java:\\d+\\)")); + assertThat(lines[lines.length - 1], matchesRegex("\tat java.base/java.lang.Thread.run\\(Thread.java:\\d+\\)")); + command.close(); } diff --git a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java index b9f2eb73b30b5..dc36485fb77ab 100644 --- a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java +++ 
b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java @@ -38,6 +38,7 @@ import java.util.List; import java.util.Map; import java.util.concurrent.CancellationException; +import java.util.concurrent.CompletableFuture; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; @@ -393,15 +394,24 @@ public void testWaitFor() throws Exception { stderr.println("final message"); }; var server = startProcess(false, false); + + CompletableFuture stopping = new CompletableFuture<>(); new Thread(() -> { - // simulate stop run as shutdown hook in another thread, eg from Ctrl-C - nonInterruptibleVoid(mainReady::await); - server.stop(); + try { + // simulate stop run as shutdown hook in another thread, eg from Ctrl-C + nonInterruptibleVoid(mainReady::await); + server.stop(); + stopping.complete(null); + } catch (Throwable e) { + stopping.completeExceptionally(e); + } }).start(); int exitCode = server.waitFor(); assertThat(process.main.isDone(), is(true)); assertThat(exitCode, equalTo(0)); assertThat(terminal.getErrorOutput(), containsString("final message")); + // rethrow any potential exception observed while stopping + stopping.get(); } public void testProcessDies() throws Exception { diff --git a/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java b/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java index 22474e63ab0df..66ae78470c55d 100644 --- a/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java +++ b/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java @@ -23,6 +23,8 @@ import org.elasticsearch.server.cli.ServerProcessBuilder; import org.elasticsearch.server.cli.ServerProcessUtils; +import java.io.IOException; + 
/** * Starts an Elasticsearch process, but does not wait for it to exit. *

@@ -55,7 +57,7 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce } @Override - public void close() { + public void close() throws IOException { if (server != null) { server.stop(); } diff --git a/distribution/tools/windows-service-cli/src/test/java/org/elasticsearch/windows/service/ProcrunCommandTests.java b/distribution/tools/windows-service-cli/src/test/java/org/elasticsearch/windows/service/ProcrunCommandTests.java index e4b651fcb77af..8f44eaa80f23a 100644 --- a/distribution/tools/windows-service-cli/src/test/java/org/elasticsearch/windows/service/ProcrunCommandTests.java +++ b/distribution/tools/windows-service-cli/src/test/java/org/elasticsearch/windows/service/ProcrunCommandTests.java @@ -22,6 +22,8 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.emptyString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.startsWith; public class ProcrunCommandTests extends WindowsServiceCliTestCase { @@ -111,8 +113,10 @@ protected String getDefaultFailureMessage() { public void testMissingExe() throws Exception { Files.delete(serviceExe); - var e = expectThrows(IllegalStateException.class, () -> executeMain("install")); - assertThat(e.getMessage(), containsString("Missing procrun exe")); + int exitCode = executeMain("install"); + + assertThat(exitCode, is(ExitCodes.CODE_ERROR)); + assertThat(terminal.getErrorOutput(), startsWith("java.lang.IllegalStateException: Missing procrun exe")); } public void testServiceId() throws Exception { diff --git a/docs/changelog/106820.yaml b/docs/changelog/106820.yaml new file mode 100644 index 0000000000000..d854e3984c13d --- /dev/null +++ b/docs/changelog/106820.yaml @@ -0,0 +1,5 @@ +pr: 106820 +summary: Add a capabilities API to check node and cluster capabilities +area: Infra/REST API +type: feature +issues: [] diff --git a/docs/changelog/107088.yaml b/docs/changelog/107088.yaml new file 
mode 100644 index 0000000000000..01a926f185eea --- /dev/null +++ b/docs/changelog/107088.yaml @@ -0,0 +1,5 @@ +pr: 107088 +summary: Introduce role description field +area: Authorization +type: enhancement +issues: [] diff --git a/docs/changelog/107876.yaml b/docs/changelog/107876.yaml new file mode 100644 index 0000000000000..21624cacf7e1d --- /dev/null +++ b/docs/changelog/107876.yaml @@ -0,0 +1,5 @@ +pr: 107876 +summary: "ESQL: Add aggregates node level reduction" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/107886.yaml b/docs/changelog/107886.yaml deleted file mode 100644 index a328bc2a2a208..0000000000000 --- a/docs/changelog/107886.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107886 -summary: Cluster state role mapper file settings service -area: Authorization -type: enhancement -issues: [] diff --git a/docs/changelog/107891.yaml b/docs/changelog/107891.yaml new file mode 100644 index 0000000000000..deb3fbd2258ff --- /dev/null +++ b/docs/changelog/107891.yaml @@ -0,0 +1,6 @@ +pr: 107891 +summary: Fix `startOffset` must be non-negative error in XLMRoBERTa tokenizer +area: Machine Learning +type: bug +issues: + - 104626 diff --git a/docs/changelog/108088.yaml b/docs/changelog/108088.yaml new file mode 100644 index 0000000000000..95c58f6dc19f1 --- /dev/null +++ b/docs/changelog/108088.yaml @@ -0,0 +1,5 @@ +pr: 108088 +summary: Add a SIMD (AVX2) optimised vector distance function for int7 on x64 +area: "Search" +type: enhancement +issues: [] diff --git a/docs/changelog/108238.yaml b/docs/changelog/108238.yaml new file mode 100644 index 0000000000000..607979c2eb0ac --- /dev/null +++ b/docs/changelog/108238.yaml @@ -0,0 +1,6 @@ +pr: 108238 +summary: "Nativeaccess: try to load all located libsystemds" +area: Infra/Core +type: bug +issues: + - 107878 diff --git a/docs/changelog/108276.yaml b/docs/changelog/108276.yaml deleted file mode 100644 index aaa78073f544e..0000000000000 --- a/docs/changelog/108276.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 
108276 -summary: Fix tsdb codec when doc-values spread in two blocks -area: TSDB -type: bug -issues: [] diff --git a/docs/changelog/108280.yaml b/docs/changelog/108280.yaml deleted file mode 100644 index b36a2f3769124..0000000000000 --- a/docs/changelog/108280.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 108280 -summary: Ensure necessary security context for s3 bulk deletions -area: Snapshot/Restore -type: bug -issues: - - 108049 diff --git a/docs/changelog/108283.yaml b/docs/changelog/108283.yaml deleted file mode 100644 index 6341a8775b729..0000000000000 --- a/docs/changelog/108283.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 108283 -summary: Fix `BlockHash` `DirectEncoder` -area: ES|QL -type: bug -issues: - - 108268 diff --git a/docs/changelog/108300.yaml b/docs/changelog/108300.yaml new file mode 100644 index 0000000000000..c4d6e468113a4 --- /dev/null +++ b/docs/changelog/108300.yaml @@ -0,0 +1,5 @@ +pr: 108300 +summary: "ESQL: Add more time span units" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/108333.yaml b/docs/changelog/108333.yaml new file mode 100644 index 0000000000000..c3152500ce1b2 --- /dev/null +++ b/docs/changelog/108333.yaml @@ -0,0 +1,5 @@ +pr: 108333 +summary: Allow `read_slm` to call GET /_slm/status +area: ILM+SLM +type: bug +issues: [] diff --git a/docs/changelog/108349.yaml b/docs/changelog/108349.yaml new file mode 100644 index 0000000000000..6d9ea3d658dca --- /dev/null +++ b/docs/changelog/108349.yaml @@ -0,0 +1,6 @@ +pr: 108349 +summary: "Ecs@mappings: reduce scope for `ecs_geo_point`" +area: Data streams +type: bug +issues: + - 108338 diff --git a/docs/changelog/108365.yaml b/docs/changelog/108365.yaml new file mode 100644 index 0000000000000..d94486e2f3ea7 --- /dev/null +++ b/docs/changelog/108365.yaml @@ -0,0 +1,5 @@ +pr: 108365 +summary: "[Bugfix] Connector API - fix status serialisation issue in termquery" +area: Application +type: bug +issues: [] diff --git a/docs/changelog/108379.yaml b/docs/changelog/108379.yaml 
new file mode 100644 index 0000000000000..312856a5db33d --- /dev/null +++ b/docs/changelog/108379.yaml @@ -0,0 +1,5 @@ +pr: 108379 +summary: Create a new `NodeRequest` for every `NodesDataTiersUsageTransport` use +area: Indices APIs +type: bug +issues: [] diff --git a/docs/changelog/108394.yaml b/docs/changelog/108394.yaml new file mode 100644 index 0000000000000..58f48fa548c6e --- /dev/null +++ b/docs/changelog/108394.yaml @@ -0,0 +1,6 @@ +pr: 108394 +summary: Handle `IndexNotFoundException` +area: Transform +type: bug +issues: + - 107263 diff --git a/docs/changelog/108396.yaml b/docs/changelog/108396.yaml new file mode 100644 index 0000000000000..63937646b755c --- /dev/null +++ b/docs/changelog/108396.yaml @@ -0,0 +1,6 @@ +pr: 108396 +summary: "Apm-data: improve default pipeline performance" +area: Data streams +type: enhancement +issues: + - 108290 diff --git a/docs/changelog/108410.yaml b/docs/changelog/108410.yaml new file mode 100644 index 0000000000000..5fd831231a3be --- /dev/null +++ b/docs/changelog/108410.yaml @@ -0,0 +1,5 @@ +pr: 108410 +summary: GeoIP tasks should wait longer for master +area: Ingest Node +type: bug +issues: [] diff --git a/docs/changelog/108429.yaml b/docs/changelog/108429.yaml new file mode 100644 index 0000000000000..562454a0de256 --- /dev/null +++ b/docs/changelog/108429.yaml @@ -0,0 +1,6 @@ +pr: 108429 +summary: Fix `ClassCastException` in Significant Terms +area: Aggregations +type: bug +issues: + - 108427 diff --git a/docs/changelog/108431.yaml b/docs/changelog/108431.yaml new file mode 100644 index 0000000000000..84607b1b99ac3 --- /dev/null +++ b/docs/changelog/108431.yaml @@ -0,0 +1,5 @@ +pr: 108431 +summary: "ESQL: Disable quoting in FROM command" +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/108444.yaml b/docs/changelog/108444.yaml new file mode 100644 index 0000000000000..c946ab24f939a --- /dev/null +++ b/docs/changelog/108444.yaml @@ -0,0 +1,5 @@ +pr: 108444 +summary: "Apm-data: ignore malformed fields, 
and too many dynamic fields" +area: Data streams +type: enhancement +issues: [] diff --git a/docs/changelog/108459.yaml b/docs/changelog/108459.yaml new file mode 100644 index 0000000000000..5e05797f284be --- /dev/null +++ b/docs/changelog/108459.yaml @@ -0,0 +1,6 @@ +pr: 108459 +summary: Do not use global ordinals strategy if the leaf reader context cannot be + obtained +area: Machine Learning +type: bug +issues: [] diff --git a/docs/internal/DistributedArchitectureGuide.md b/docs/internal/DistributedArchitectureGuide.md index b8fb92b1ea15d..59305c6305737 100644 --- a/docs/internal/DistributedArchitectureGuide.md +++ b/docs/internal/DistributedArchitectureGuide.md @@ -10,70 +10,7 @@ ### ActionListener -Callbacks are used extensively throughout Elasticsearch because they enable us to write asynchronous and nonblocking code, i.e. code which -doesn't necessarily compute a result straight away but also doesn't block the calling thread waiting for the result to become available. -They support several useful control flows: - -- They can be completed immediately on the calling thread. -- They can be completed concurrently on a different thread. -- They can be stored in a data structure and completed later on when the system reaches a particular state. -- Most commonly, they can be passed on to other methods that themselves require a callback. -- They can be wrapped in another callback which modifies the behaviour of the original callback, perhaps adding some extra code to run - before or after completion, before passing them on. - -`ActionListener` is a general-purpose callback interface that is used extensively across the Elasticsearch codebase. `ActionListener` is -used pretty much everywhere that needs to perform some asynchronous and nonblocking computation. The uniformity makes it easier to compose -parts of the system together without needing to build adapters to convert back and forth between different kinds of callback. 
It also makes -it easier to develop the skills needed to read and understand all the asynchronous code, although this definitely takes practice and is -certainly not easy in an absolute sense. Finally, it has allowed us to build a rich library for working with `ActionListener` instances -themselves, creating new instances out of existing ones and completing them in interesting ways. See for instance: - -- all the static methods on [ActionListener](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/ActionListener.java) itself -- [`ThreadedActionListener`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/ThreadedActionListener.java) for forking work elsewhere -- [`RefCountingListener`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/RefCountingListener.java) for running work in parallel -- [`SubscribableListener`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java) for constructing flexible workflows - -Callback-based asynchronous code can easily call regular synchronous code, but synchronous code cannot run callback-based asynchronous code -without blocking the calling thread until the callback is called back. This blocking is at best undesirable (threads are too expensive to -waste with unnecessary blocking) and at worst outright broken (the blocking can lead to deadlock). Unfortunately this means that most of our -code ends up having to be written with callbacks, simply because it's ultimately calling into some other code that takes a callback. The -entry points for all Elasticsearch APIs are callback-based (e.g. 
REST APIs all start at -[`org.elasticsearch.rest.BaseRestHandler#prepareRequest`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java#L158-L171), -and transport APIs all start at -[`org.elasticsearch.action.support.TransportAction#doExecute`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/TransportAction.java#L65)) -and the whole system fundamentally works in terms of an event loop (a `io.netty.channel.EventLoop`) which processes network events via -callbacks. - -`ActionListener` is not an _ad-hoc_ invention. Formally speaking, it is our implementation of the general concept of a continuation in the -sense of [_continuation-passing style_](https://en.wikipedia.org/wiki/Continuation-passing_style) (CPS): an extra argument to a function -which defines how to continue the computation when the result is available. This is in contrast to _direct style_ which is the more usual -style of calling methods that return values directly back to the caller so they can continue executing as normal. There's essentially two -ways that computation can continue in Java (it can return a value or it can throw an exception) which is why `ActionListener` has both an -`onResponse()` and an `onFailure()` method. - -CPS is strictly more expressive than direct style: direct code can be mechanically translated into continuation-passing style, but CPS also -enables all sorts of other useful control structures such as forking work onto separate threads, possibly to be executed in parallel, -perhaps even across multiple nodes, or possibly collecting a list of continuations all waiting for the same condition to be satisfied before -proceeding (e.g. -[`SubscribableListener`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java) -amongst many others). 
Some languages have first-class support for continuations (e.g. the `async` and `await` primitives in C#) allowing the -programmer to write code in direct style away from those exotic control structures, but Java does not. That's why we have to manipulate all -the callbacks ourselves. - -Strictly speaking, CPS requires that a computation _only_ continues by calling the continuation. In Elasticsearch, this means that -asynchronous methods must have `void` return type and may not throw any exceptions. This is mostly the case in our code as written today, -and is a good guiding principle, but we don't enforce void exceptionless methods and there are some deviations from this rule. In -particular, it's not uncommon to permit some methods to throw an exception, using things like -[`ActionListener#run`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/ActionListener.java#L381-L390) -(or an equivalent `try ... catch ...` block) further up the stack to handle it. Some methods also take (and may complete) an -`ActionListener` parameter, but still return a value separately for other local synchronous work. - -This pattern is often used in the transport action layer with the use of the -[ChannelActionListener](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/ChannelActionListener.java) -class, which wraps a `TransportChannel` produced by the transport layer. `TransportChannel` implementations can hold a reference to a Netty -channel with which to pass the response back to the network caller. Netty has a many-to-one association of network callers to channels, so a -call taking a long time generally won't hog resources: it's cheap. A transport action can take hours to respond and that's alright, barring -caller timeouts. 
+See the [Javadocs for `ActionListener`](https://github.com/elastic/elasticsearch/blob/main/server/src/main/java/org/elasticsearch/action/ActionListener.java) (TODO: add useful starter references and explanations for a range of Listener classes. Reference the Netty section.) @@ -133,6 +70,14 @@ are only used for internode operations/communications. ### Work Queues +### RestClient + +The `RestClient` is primarily used in testing, to send requests against cluster nodes in the same format as would users. There +are some uses of `RestClient`, via `RestClientBuilder`, in the production code. For example, remote reindex leverages the +`RestClient` internally as the REST client to the remote elasticsearch cluster, and to take advantage of the compatibility of +`RestClient` requests with much older elasticsearch versions. The `RestClient` is also used externally by the `Java API Client` +to communicate with Elasticsearch. + # Cluster Coordination (Sketch of important classes? Might inform more sections to add for details.) diff --git a/docs/internal/GeneralArchitectureGuide.md b/docs/internal/GeneralArchitectureGuide.md index f865277d07f8f..a2dadb70bf975 100644 --- a/docs/internal/GeneralArchitectureGuide.md +++ b/docs/internal/GeneralArchitectureGuide.md @@ -6,6 +6,66 @@ ## Settings +Elasticsearch supports [cluster-level settings][] and [index-level settings][], configurable via [node-level file settings][] +(e.g. `elasticsearch.yml` file), command line arguments and REST APIs. + +### Declaring a Setting + +[cluster-level settings]: https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-update-settings.html +[index-level settings]: https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-update-settings.html +[node-level file settings]: https://www.elastic.co/guide/en/elasticsearch/reference/current/settings.html + +The [Setting][] class is the building block for Elasticsearch server settings. 
Each `Setting` can take multiple [Property][] +declarations to define setting characteristics. All setting values first come from the node-local `elasticsearch.yml` file, +if they are set therein, before falling back to the default specified in their `Setting` declaration. [A setting][] with +`Property.Dynamic` can be updated during runtime, but must be paired with a [local volatile variable like this one][] and +registered in the `ClusterSettings` via a utility like [ClusterSettings#initializeAndWatch()][] to catch and immediately +apply dynamic changes. NB that a common dynamic Setting bug is always reading the value directly from [Metadata#settings()][], +which holds the default and dynamically updated values, but _not_ the node-local `elasticsearch.yml` value. The scope of a +Setting must also be declared, such as `Property.IndexScope` for a setting that applies to indexes, or `Property.NodeScope` +for a cluster-level setting. + +[Setting]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/common/settings/Setting.java#L57-L80 +[Property]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/common/settings/Setting.java#L82 +[A setting]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java#L111-L117 +[local volatile variable like this one]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java#L123 +[ClusterSettings#initializeAndWatch()]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java#L145 +[Metadata#settings()]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java#L713-L715 + 
+[ClusterSettings][] tracks the [core Elasticsearch settings][]. Ultimately the `ClusterSettings` get loaded via the +[SettingsModule][]. Additional settings from the various plugins are [collected during node construction] and passed into the +[SettingsModule constructor][]. The Plugin interface has a [getSettings()][] method via which each plugin can declare additional +settings. + +[ClusterSettings]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java#L138 +[core Elasticsearch settings]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java#L204-L586 +[SettingsModule]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/common/settings/SettingsModule.java#L54 +[collected during node construction]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/node/NodeConstruction.java#L483 +[SettingsModule constructor]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/node/NodeConstruction.java#L491-L495 +[getSettings()]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/plugins/Plugin.java#L203-L208 + +### Dynamically updating a Setting + +Externally, [TransportClusterUpdateSettingsAction][] and [TransportUpdateSettingsAction][] (and the corresponding REST endpoints) +allow users to dynamically change cluster and index settings, respectively. Internally, `AbstractScopedSettings` (parent class +of `ClusterSettings`) has various helper methods to track dynamic changes: it keeps a [registry of `SettingUpdater`][] consumer +lambdas to run updates when settings are changed in the cluster state. The `ClusterApplierService` [sends setting updates][] +through to the `AbstractScopedSettings`, invoking the consumers registered therein for each updated setting. 
+ +[TransportClusterUpdateSettingsAction]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java#L154-L160 +[TransportUpdateSettingsAction]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java#L96-L101 +[registry of `SettingUpdater`]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java#L379-L381 +[sends setting updates]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java#L490-L494 + +Index settings are always persisted. They can only be modified on an existing index, and setting values are persisted as part +of the `IndexMetadata`. Cluster settings, however, can be either persisted or transient depending on how they are tied to +[Metadata][] ([applied here][]). Changes to persisted cluster settings will survive a full cluster restart; whereas changes +made to transient cluster settings will reset to their default values, or the `elasticsearch.yml` values, if the cluster +state must ever be reloaded from persisted state. 
+ +[Metadata]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java#L212-L213 +[applied here]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java#L2437 + ## Deprecations ## Plugins diff --git a/docs/reference/alias.asciidoc b/docs/reference/alias.asciidoc index e5c2db65778d8..9d784f530d63c 100644 --- a/docs/reference/alias.asciidoc +++ b/docs/reference/alias.asciidoc @@ -358,6 +358,8 @@ POST _aliases ---- // TEST[s/^/PUT my-index-2099.05.06-000001\n/] +NOTE: Filters are only applied when using the <>, and are not applied when <>. + [discrete] [[alias-routing]] === Routing diff --git a/docs/reference/esql/esql-commands.asciidoc b/docs/reference/esql/esql-commands.asciidoc index 708127718fe38..1f07361b89aac 100644 --- a/docs/reference/esql/esql-commands.asciidoc +++ b/docs/reference/esql/esql-commands.asciidoc @@ -39,7 +39,7 @@ image::images/esql/processing-command.svg[A processing command changing an input * <> * <> * <> -* <> +* experimental:[] <> * <> * <> * <> diff --git a/docs/reference/esql/esql-get-started.asciidoc b/docs/reference/esql/esql-get-started.asciidoc index 663b2f8ecd249..b7928898a3bbb 100644 --- a/docs/reference/esql/esql-get-started.asciidoc +++ b/docs/reference/esql/esql-get-started.asciidoc @@ -1,12 +1,9 @@ [[esql-getting-started]] == Getting started with {esql} queries - ++++ Getting started ++++ -preview::["Do not use {esql} on production environments. This functionality is in technical preview and may be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features."] - This guide shows how you can use {esql} to query and aggregate your data. 
[TIP] diff --git a/docs/reference/esql/esql-syntax.asciidoc b/docs/reference/esql/esql-syntax.asciidoc index c5d56ef15fdfd..c7f741d064310 100644 --- a/docs/reference/esql/esql-syntax.asciidoc +++ b/docs/reference/esql/esql-syntax.asciidoc @@ -160,14 +160,15 @@ Datetime intervals and timespans can be expressed using timespan literals. Timespan literals are a combination of a number and a qualifier. These qualifiers are supported: -* `millisecond`/`milliseconds` -* `second`/`seconds` -* `minute`/`minutes` -* `hour`/`hours` -* `day`/`days` -* `week`/`weeks` -* `month`/`months` -* `year`/`years` +* `millisecond`/`milliseconds`/`ms` +* `second`/`seconds`/`sec`/`s` +* `minute`/`minutes`/`min` +* `hour`/`hours`/`h` +* `day`/`days`/`d` +* `week`/`weeks`/`w` +* `month`/`months`/`mo` +* `quarter`/`quarters`/`q` +* `year`/`years`/`yr`/`y` Timespan literals are not whitespace sensitive. These expressions are all valid: diff --git a/docs/reference/esql/functions/coalesce.asciidoc b/docs/reference/esql/functions/coalesce.asciidoc deleted file mode 100644 index 2d8c0f379c82e..0000000000000 --- a/docs/reference/esql/functions/coalesce.asciidoc +++ /dev/null @@ -1,13 +0,0 @@ -[discrete] -[[esql-coalesce]] -=== `COALESCE` - -*Syntax* - -[source,esql] ----- -COALESCE(expression1 [, ..., expressionN]) ----- -include::parameters/coalesce.asciidoc[] -include::description/coalesce.asciidoc[] -include::examples/coalesce.asciidoc[] diff --git a/docs/reference/esql/functions/conditional-functions-and-expressions.asciidoc b/docs/reference/esql/functions/conditional-functions-and-expressions.asciidoc index d835a14856c03..081e3b8589dba 100644 --- a/docs/reference/esql/functions/conditional-functions-and-expressions.asciidoc +++ b/docs/reference/esql/functions/conditional-functions-and-expressions.asciidoc @@ -15,7 +15,7 @@ manner. 
{esql} supports these conditional functions: * <> // end::cond_list[] -include::case.asciidoc[] -include::coalesce.asciidoc[] -include::greatest.asciidoc[] -include::least.asciidoc[] +include::layout/case.asciidoc[] +include::layout/coalesce.asciidoc[] +include::layout/greatest.asciidoc[] +include::layout/least.asciidoc[] diff --git a/docs/reference/esql/functions/date-time-functions.asciidoc b/docs/reference/esql/functions/date-time-functions.asciidoc index 8ce26eaabe381..eceb6378426a2 100644 --- a/docs/reference/esql/functions/date-time-functions.asciidoc +++ b/docs/reference/esql/functions/date-time-functions.asciidoc @@ -21,4 +21,4 @@ include::layout/date_extract.asciidoc[] include::layout/date_format.asciidoc[] include::layout/date_parse.asciidoc[] include::layout/date_trunc.asciidoc[] -include::now.asciidoc[] +include::layout/now.asciidoc[] diff --git a/docs/reference/esql/functions/description/case.asciidoc b/docs/reference/esql/functions/description/case.asciidoc index 5c98a7a2620d0..c3e80301fbc31 100644 --- a/docs/reference/esql/functions/description/case.asciidoc +++ b/docs/reference/esql/functions/description/case.asciidoc @@ -2,4 +2,4 @@ *Description* -Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to true. +Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to `true`. If the number of arguments is odd, the last argument is the default value which is returned when no condition matches. If the number of arguments is even, and no condition matches, the function returns `null`. 
diff --git a/docs/reference/esql/functions/description/date_diff.asciidoc b/docs/reference/esql/functions/description/date_diff.asciidoc index 3dd19b5885902..dbc03d59a2bf7 100644 --- a/docs/reference/esql/functions/description/date_diff.asciidoc +++ b/docs/reference/esql/functions/description/date_diff.asciidoc @@ -25,3 +25,9 @@ s|abbreviations | microsecond | microseconds, mcs | nanosecond | nanoseconds, ns |=== + +Note that while there is an overlap between the function's supported units and +{esql}'s supported time span literals, these sets are distinct and not +interchangeable. Similarly, the supported abbreviations are conveniently shared +with implementations of this function in other established products and not +necessarily common with the date-time nomenclature used by {es}. diff --git a/docs/reference/esql/functions/description/greatest.asciidoc b/docs/reference/esql/functions/description/greatest.asciidoc index 3c7cfd3bfb14c..ed705d0bbb59e 100644 --- a/docs/reference/esql/functions/description/greatest.asciidoc +++ b/docs/reference/esql/functions/description/greatest.asciidoc @@ -2,4 +2,6 @@ *Description* -Returns the maximum value from many columns. +Returns the maximum value from multiple columns. This is similar to <> except it is intended to run on multiple columns at once. + +NOTE: When run on `keyword` or `text` fields, this returns the last string in alphabetical order. When run on `boolean` columns this will return `true` if any values are `true`. diff --git a/docs/reference/esql/functions/description/least.asciidoc b/docs/reference/esql/functions/description/least.asciidoc index 2aeb1f85aa51a..c5daf0bc79ae0 100644 --- a/docs/reference/esql/functions/description/least.asciidoc +++ b/docs/reference/esql/functions/description/least.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns the minimum value from many columns. +Returns the minimum value from multiple columns. This is similar to <> except it is intended to run on multiple columns at once. 
diff --git a/docs/reference/esql/functions/description/now.asciidoc b/docs/reference/esql/functions/description/now.asciidoc new file mode 100644 index 0000000000000..4852c98b4980a --- /dev/null +++ b/docs/reference/esql/functions/description/now.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns current date and time. diff --git a/docs/reference/esql/functions/description/st_contains.asciidoc b/docs/reference/esql/functions/description/st_contains.asciidoc index 678fde7f5d98b..a2c81b9d24a10 100644 --- a/docs/reference/esql/functions/description/st_contains.asciidoc +++ b/docs/reference/esql/functions/description/st_contains.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns whether the first geometry contains the second geometry. +Returns whether the first geometry contains the second geometry. This is the inverse of the <> function. diff --git a/docs/reference/esql/functions/description/st_disjoint.asciidoc b/docs/reference/esql/functions/description/st_disjoint.asciidoc index 95ab02a39614a..461dd61daef7a 100644 --- a/docs/reference/esql/functions/description/st_disjoint.asciidoc +++ b/docs/reference/esql/functions/description/st_disjoint.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns whether the two geometries or geometry columns are disjoint. +Returns whether the two geometries or geometry columns are disjoint. This is the inverse of the <> function. In mathematical terms: ST_Disjoint(A, B) ⇔ A ⋂ B = ∅ diff --git a/docs/reference/esql/functions/description/st_intersects.asciidoc b/docs/reference/esql/functions/description/st_intersects.asciidoc index b736ba29a6c8b..48fd7bdb2f338 100644 --- a/docs/reference/esql/functions/description/st_intersects.asciidoc +++ b/docs/reference/esql/functions/description/st_intersects.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns whether the two geometries or geometry columns intersect. 
+Returns true if two geometries intersect. They intersect if they have any point in common, including their interior points (points along lines or within polygons). This is the inverse of the <> function. In mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅ diff --git a/docs/reference/esql/functions/description/st_within.asciidoc b/docs/reference/esql/functions/description/st_within.asciidoc index 890f28cb769b0..38a34f518234a 100644 --- a/docs/reference/esql/functions/description/st_within.asciidoc +++ b/docs/reference/esql/functions/description/st_within.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns whether the first geometry is within the second geometry. +Returns whether the first geometry is within the second geometry. This is the inverse of the <> function. diff --git a/docs/reference/esql/functions/description/st_x.asciidoc b/docs/reference/esql/functions/description/st_x.asciidoc index beb077bea332c..33d867f862429 100644 --- a/docs/reference/esql/functions/description/st_x.asciidoc +++ b/docs/reference/esql/functions/description/st_x.asciidoc @@ -2,4 +2,4 @@ *Description* -Extracts the x-coordinate from a point geometry. +Extracts the `x` coordinate from the supplied point. If the point is of type `geo_point` this is equivalent to extracting the `longitude` value. diff --git a/docs/reference/esql/functions/description/st_y.asciidoc b/docs/reference/esql/functions/description/st_y.asciidoc index 19c371d2ef931..b03956a51e1a6 100644 --- a/docs/reference/esql/functions/description/st_y.asciidoc +++ b/docs/reference/esql/functions/description/st_y.asciidoc @@ -2,4 +2,4 @@ *Description* -Extracts the y-coordinate from a point geometry. +Extracts the `y` coordinate from the supplied point. If the point is of type `geo_point` this is equivalent to extracting the `latitude` value.
diff --git a/docs/reference/esql/functions/examples/case.asciidoc b/docs/reference/esql/functions/examples/case.asciidoc new file mode 100644 index 0000000000000..c5c766512ce0b --- /dev/null +++ b/docs/reference/esql/functions/examples/case.asciidoc @@ -0,0 +1,32 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Examples* + +Determine whether employees are monolingual, bilingual, or polyglot: +[source.merge.styled,esql] +---- +include::{esql-specs}/docs.csv-spec[tag=case] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/docs.csv-spec[tag=case-result] +|=== +Calculate the total connection success rate based on log messages: +[source.merge.styled,esql] +---- +include::{esql-specs}/conditional.csv-spec[tag=docsCaseSuccessRate] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/conditional.csv-spec[tag=docsCaseSuccessRate-result] +|=== +Calculate an hourly error rate as a percentage of the total number of log messages: +[source.merge.styled,esql] +---- +include::{esql-specs}/conditional.csv-spec[tag=docsCaseHourlyErrorRate] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/conditional.csv-spec[tag=docsCaseHourlyErrorRate-result] +|=== + diff --git a/docs/reference/esql/functions/examples/greatest.asciidoc b/docs/reference/esql/functions/examples/greatest.asciidoc new file mode 100644 index 0000000000000..bd89ad1b3cdd1 --- /dev/null +++ b/docs/reference/esql/functions/examples/greatest.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/math.csv-spec[tag=greatest] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/math.csv-spec[tag=greatest-result] +|=== + diff --git a/docs/reference/esql/functions/examples/least.asciidoc b/docs/reference/esql/functions/examples/least.asciidoc new file mode 100644 index 0000000000000..67fc5260f6391 --- /dev/null +++ b/docs/reference/esql/functions/examples/least.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/math.csv-spec[tag=least] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/math.csv-spec[tag=least-result] +|=== + diff --git a/docs/reference/esql/functions/examples/now.asciidoc b/docs/reference/esql/functions/examples/now.asciidoc new file mode 100644 index 0000000000000..b8953de93724c --- /dev/null +++ b/docs/reference/esql/functions/examples/now.asciidoc @@ -0,0 +1,22 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Examples* + +[source.merge.styled,esql] +---- +include::{esql-specs}/date.csv-spec[tag=docsNow] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/date.csv-spec[tag=docsNow-result] +|=== +To retrieve logs from the last hour: +[source.merge.styled,esql] +---- +include::{esql-specs}/date.csv-spec[tag=docsNowWhere] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/date.csv-spec[tag=docsNowWhere-result] +|=== + diff --git a/docs/reference/esql/functions/examples/st_x.asciidoc b/docs/reference/esql/functions/examples/st_x.asciidoc new file mode 100644 index 0000000000000..895e76c6c04e2 --- /dev/null +++ b/docs/reference/esql/functions/examples/st_x.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/spatial.csv-spec[tag=st_x_y] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/spatial.csv-spec[tag=st_x_y-result] +|=== + diff --git a/docs/reference/esql/functions/examples/st_y.asciidoc b/docs/reference/esql/functions/examples/st_y.asciidoc new file mode 100644 index 0000000000000..895e76c6c04e2 --- /dev/null +++ b/docs/reference/esql/functions/examples/st_y.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/spatial.csv-spec[tag=st_x_y] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/spatial.csv-spec[tag=st_x_y-result] +|=== + diff --git a/docs/reference/esql/functions/greatest.asciidoc b/docs/reference/esql/functions/greatest.asciidoc deleted file mode 100644 index 003f1f46e6db5..0000000000000 --- a/docs/reference/esql/functions/greatest.asciidoc +++ /dev/null @@ -1,38 +0,0 @@ -[discrete] -[[esql-greatest]] -=== `GREATEST` - -*Syntax* - -[.text-center] -image::esql/functions/signature/greatest.svg[Embedded,opts=inline] - -*Parameters* - -`first`:: -First of the columns to evaluate. - -`rest`:: -The rest of the columns to evaluate. - -*Description* - -Returns the maximum value from multiple columns. This is similar to <> -except it is intended to run on multiple columns at once. - -NOTE: When run on `keyword` or `text` fields, this returns the last string - in alphabetical order. When run on `boolean` columns this will return - `true` if any values are `true`. - -include::types/greatest.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/math.csv-spec[tag=greatest] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/math.csv-spec[tag=greatest-result] -|=== diff --git a/docs/reference/esql/functions/kibana/definition/case.json b/docs/reference/esql/functions/kibana/definition/case.json index 73bc215ac6ade..5959eed62d37b 100644 --- a/docs/reference/esql/functions/kibana/definition/case.json +++ b/docs/reference/esql/functions/kibana/definition/case.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", "type" : "eval", "name" : "case", - "description" : "Accepts pairs of conditions and values.\nThe function returns the value that belongs to the first condition that evaluates to true.", + "description" : "Accepts pairs of conditions and values. The function returns the value that\nbelongs to the first condition that evaluates to `true`.\n\nIf the number of arguments is odd, the last argument is the default value which\nis returned when no condition matches. If the number of arguments is even, and\nno condition matches, the function returns `null`.", "signatures" : [ { "params" : [ @@ -10,23 +10,226 @@ "name" : "condition", "type" : "boolean", "optional" : false, - "description" : "" + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "boolean", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "cartesian_point", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "cartesian_point" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "datetime", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." 
+ } + ], + "variadic" : true, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "double", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "geo_point", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "geo_point" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "integer", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "ip", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." 
}, { "name" : "trueValue", "type" : "keyword", "optional" : false, - "description" : "" + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." }, { "name" : "falseValue", "type" : "keyword", "optional" : true, - "description" : "" + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." } ], "variadic" : true, "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "long", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "text", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "unsigned_long", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." 
+ } + ], + "variadic" : true, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "version", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "version" } + ], + "examples" : [ + "FROM employees\n| EVAL type = CASE(\n languages <= 1, \"monolingual\",\n languages <= 2, \"bilingual\",\n \"polyglot\")\n| KEEP emp_no, languages, type", + "FROM sample_data\n| EVAL successful = CASE(\n STARTS_WITH(message, \"Connected to\"), 1,\n message == \"Connection error\", 0\n )\n| STATS success_rate = AVG(successful)", + "FROM sample_data\n| EVAL error = CASE(message LIKE \"*error*\", 1, 0)\n| EVAL hour = DATE_TRUNC(1 hour, @timestamp)\n| STATS error_rate = AVG(error) by hour\n| SORT hour" ] } diff --git a/docs/reference/esql/functions/kibana/definition/coalesce.json b/docs/reference/esql/functions/kibana/definition/coalesce.json index 87feead06d091..1081b42839577 100644 --- a/docs/reference/esql/functions/kibana/definition/coalesce.json +++ b/docs/reference/esql/functions/kibana/definition/coalesce.json @@ -10,7 +10,7 @@ "name" : "first", "type" : "boolean", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." } ], "variadic" : true, @@ -22,13 +22,13 @@ "name" : "first", "type" : "boolean", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." }, { "name" : "rest", "type" : "boolean", "optional" : true, - "description" : "Other expression to evaluate" + "description" : "Other expression to evaluate." 
} ], "variadic" : true, @@ -40,7 +40,7 @@ "name" : "first", "type" : "integer", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." } ], "variadic" : true, @@ -52,13 +52,13 @@ "name" : "first", "type" : "integer", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." }, { "name" : "rest", "type" : "integer", "optional" : true, - "description" : "Other expression to evaluate" + "description" : "Other expression to evaluate." } ], "variadic" : true, @@ -70,7 +70,7 @@ "name" : "first", "type" : "keyword", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." } ], "variadic" : true, @@ -82,13 +82,13 @@ "name" : "first", "type" : "keyword", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." }, { "name" : "rest", "type" : "keyword", "optional" : true, - "description" : "Other expression to evaluate" + "description" : "Other expression to evaluate." } ], "variadic" : true, @@ -100,7 +100,7 @@ "name" : "first", "type" : "long", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." } ], "variadic" : true, @@ -112,13 +112,13 @@ "name" : "first", "type" : "long", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." }, { "name" : "rest", "type" : "long", "optional" : true, - "description" : "Other expression to evaluate" + "description" : "Other expression to evaluate." } ], "variadic" : true, @@ -130,7 +130,7 @@ "name" : "first", "type" : "text", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." } ], "variadic" : true, @@ -142,13 +142,13 @@ "name" : "first", "type" : "text", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." 
}, { "name" : "rest", "type" : "text", "optional" : true, - "description" : "Other expression to evaluate" + "description" : "Other expression to evaluate." } ], "variadic" : true, diff --git a/docs/reference/esql/functions/kibana/definition/greatest.json b/docs/reference/esql/functions/kibana/definition/greatest.json index f72f54708c6b1..15c9f58d32d3e 100644 --- a/docs/reference/esql/functions/kibana/definition/greatest.json +++ b/docs/reference/esql/functions/kibana/definition/greatest.json @@ -2,7 +2,8 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "eval", "name" : "greatest", - "description" : "Returns the maximum value from many columns.", + "description" : "Returns the maximum value from multiple columns. This is similar to <>\nexcept it is intended to run on multiple columns at once.", + "note" : "When run on `keyword` or `text` fields, this returns the last string in alphabetical order. When run on `boolean` columns this will return `true` if any values are `true`.", "signatures" : [ { "params" : [ @@ -10,7 +11,7 @@ "name" : "first", "type" : "boolean", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -22,13 +23,13 @@ "name" : "first", "type" : "boolean", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "boolean", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -40,13 +41,13 @@ "name" : "first", "type" : "double", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "double", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." 
} ], "variadic" : true, @@ -58,7 +59,7 @@ "name" : "first", "type" : "integer", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -70,13 +71,13 @@ "name" : "first", "type" : "integer", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "integer", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -88,13 +89,13 @@ "name" : "first", "type" : "ip", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "ip", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -106,7 +107,7 @@ "name" : "first", "type" : "keyword", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -118,13 +119,13 @@ "name" : "first", "type" : "keyword", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "keyword", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -136,7 +137,7 @@ "name" : "first", "type" : "long", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -148,13 +149,13 @@ "name" : "first", "type" : "long", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "long", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -166,7 +167,7 @@ "name" : "first", "type" : "text", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." 
} ], "variadic" : true, @@ -178,13 +179,13 @@ "name" : "first", "type" : "text", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "text", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -196,17 +197,20 @@ "name" : "first", "type" : "version", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "version", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, "returnType" : "version" } + ], + "examples" : [ + "ROW a = 10, b = 20\n| EVAL g = GREATEST(a, b)" ] } diff --git a/docs/reference/esql/functions/kibana/definition/least.json b/docs/reference/esql/functions/kibana/definition/least.json index 66efedc0c9fe5..0b922ad6ad3c2 100644 --- a/docs/reference/esql/functions/kibana/definition/least.json +++ b/docs/reference/esql/functions/kibana/definition/least.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "eval", "name" : "least", - "description" : "Returns the minimum value from many columns.", + "description" : "Returns the minimum value from multiple columns. This is similar to <> except it is intended to run on multiple columns at once.", "signatures" : [ { "params" : [ @@ -10,7 +10,7 @@ "name" : "first", "type" : "boolean", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -22,13 +22,13 @@ "name" : "first", "type" : "boolean", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "boolean", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." 
} ], "variadic" : true, @@ -40,13 +40,13 @@ "name" : "first", "type" : "double", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "double", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -58,7 +58,7 @@ "name" : "first", "type" : "integer", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -70,13 +70,13 @@ "name" : "first", "type" : "integer", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "integer", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -88,13 +88,13 @@ "name" : "first", "type" : "ip", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "ip", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -106,7 +106,7 @@ "name" : "first", "type" : "keyword", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -118,13 +118,13 @@ "name" : "first", "type" : "keyword", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "keyword", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -136,7 +136,7 @@ "name" : "first", "type" : "long", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -148,13 +148,13 @@ "name" : "first", "type" : "long", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." 
}, { "name" : "rest", "type" : "long", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -166,7 +166,7 @@ "name" : "first", "type" : "text", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -178,13 +178,13 @@ "name" : "first", "type" : "text", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "text", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -196,17 +196,20 @@ "name" : "first", "type" : "version", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "version", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, "returnType" : "version" } + ], + "examples" : [ + "ROW a = 10, b = 20\n| EVAL l = LEAST(a, b)" ] } diff --git a/docs/reference/esql/functions/kibana/definition/now.json b/docs/reference/esql/functions/kibana/definition/now.json new file mode 100644 index 0000000000000..9cdb4945afa2e --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/now.json @@ -0,0 +1,16 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "now", + "description" : "Returns current date and time.", + "signatures" : [ + { + "params" : [ ], + "returnType" : "datetime" + } + ], + "examples" : [ + "ROW current_date = NOW()", + "FROM sample_data\n| WHERE @timestamp > NOW() - 1 hour" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/st_contains.json b/docs/reference/esql/functions/kibana/definition/st_contains.json index f4f8003917908..1ef76e46f371a 100644 --- a/docs/reference/esql/functions/kibana/definition/st_contains.json +++ b/docs/reference/esql/functions/kibana/definition/st_contains.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "eval", "name" : "st_contains", - "description" : "Returns whether the first geometry contains the second geometry.", + "description" : "Returns whether the first geometry contains the second geometry.\nThis is the inverse of the <> function.", "signatures" : [ { "params" : [ @@ -10,13 +10,13 @@ "name" : "geomA", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." 
} ], "variadic" : false, @@ -28,13 +28,13 @@ "name" : "geomA", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -46,13 +46,13 @@ "name" : "geomA", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -64,13 +64,13 @@ "name" : "geomA", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." 
}, { "name" : "geomB", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -82,13 +82,13 @@ "name" : "geomA", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -100,13 +100,13 @@ "name" : "geomA", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." 
} ], "variadic" : false, @@ -118,13 +118,13 @@ "name" : "geomA", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -136,13 +136,13 @@ "name" : "geomA", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, diff --git a/docs/reference/esql/functions/kibana/definition/st_disjoint.json b/docs/reference/esql/functions/kibana/definition/st_disjoint.json index 98647b63ff18f..e408a0f98fe6c 100644 --- a/docs/reference/esql/functions/kibana/definition/st_disjoint.json +++ b/docs/reference/esql/functions/kibana/definition/st_disjoint.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", "type" : "eval", "name" : "st_disjoint", - "description" : "Returns whether the two geometries or geometry columns are disjoint.", + "description" : "Returns whether the two geometries or geometry columns are disjoint.\nThis is the inverse of the <> function.\nIn mathematical terms: ST_Disjoint(A, B) ⇔ A ⋂ B = ∅", "signatures" : [ { "params" : [ @@ -10,13 +10,13 @@ "name" : "geomA", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -28,13 +28,13 @@ "name" : "geomA", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." 
} ], "variadic" : false, @@ -46,13 +46,13 @@ "name" : "geomA", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -64,13 +64,13 @@ "name" : "geomA", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -82,13 +82,13 @@ "name" : "geomA", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." 
}, { "name" : "geomB", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -100,13 +100,13 @@ "name" : "geomA", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -118,13 +118,13 @@ "name" : "geomA", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." 
} ], "variadic" : false, @@ -136,13 +136,13 @@ "name" : "geomA", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, diff --git a/docs/reference/esql/functions/kibana/definition/st_intersects.json b/docs/reference/esql/functions/kibana/definition/st_intersects.json index ba619fe57ecf5..2f9f255ab1870 100644 --- a/docs/reference/esql/functions/kibana/definition/st_intersects.json +++ b/docs/reference/esql/functions/kibana/definition/st_intersects.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "eval", "name" : "st_intersects", - "description" : "Returns whether the two geometries or geometry columns intersect.", + "description" : "Returns true if two geometries intersect.\nThey intersect if they have any point in common, including their interior points\n(points along lines or within polygons).\nThis is the inverse of the <> function.\nIn mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅", "signatures" : [ { "params" : [ @@ -10,13 +10,13 @@ "name" : "geomA", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. 
If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -28,13 +28,13 @@ "name" : "geomA", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -46,13 +46,13 @@ "name" : "geomA", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. 
This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -64,13 +64,13 @@ "name" : "geomA", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -82,13 +82,13 @@ "name" : "geomA", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -100,13 +100,13 @@ "name" : "geomA", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." 
}, { "name" : "geomB", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -118,13 +118,13 @@ "name" : "geomA", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -136,13 +136,13 @@ "name" : "geomA", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." 
} ], "variadic" : false, diff --git a/docs/reference/esql/functions/kibana/definition/st_within.json b/docs/reference/esql/functions/kibana/definition/st_within.json index ee98337441ab7..e0cdf62fe0f98 100644 --- a/docs/reference/esql/functions/kibana/definition/st_within.json +++ b/docs/reference/esql/functions/kibana/definition/st_within.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "eval", "name" : "st_within", - "description" : "Returns whether the first geometry is within the second geometry.", + "description" : "Returns whether the first geometry is within the second geometry.\nThis is the inverse of the <> function.", "signatures" : [ { "params" : [ @@ -10,13 +10,13 @@ "name" : "geomA", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -28,13 +28,13 @@ "name" : "geomA", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." 
}, { "name" : "geomB", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -46,13 +46,13 @@ "name" : "geomA", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -64,13 +64,13 @@ "name" : "geomA", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." 
} ], "variadic" : false, @@ -82,13 +82,13 @@ "name" : "geomA", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -100,13 +100,13 @@ "name" : "geomA", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -118,13 +118,13 @@ "name" : "geomA", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." 
}, { "name" : "geomB", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -136,13 +136,13 @@ "name" : "geomA", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, diff --git a/docs/reference/esql/functions/kibana/definition/st_x.json b/docs/reference/esql/functions/kibana/definition/st_x.json index 57598b3470e11..c3554a2ee808b 100644 --- a/docs/reference/esql/functions/kibana/definition/st_x.json +++ b/docs/reference/esql/functions/kibana/definition/st_x.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", "type" : "eval", "name" : "st_x", - "description" : "Extracts the x-coordinate from a point geometry.", + "description" : "Extracts the `x` coordinate from the supplied point.\nIf the points is of type `geo_point` this is equivalent to extracting the `longitude` value.", "signatures" : [ { "params" : [ @@ -10,7 +10,7 @@ "name" : "point", "type" : "cartesian_point", "optional" : false, - "description" : "" + "description" : "Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`." } ], "variadic" : false, @@ -22,11 +22,14 @@ "name" : "point", "type" : "geo_point", "optional" : false, - "description" : "" + "description" : "Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`." } ], "variadic" : false, "returnType" : "double" } + ], + "examples" : [ + "ROW point = TO_GEOPOINT(\"POINT(42.97109629958868 14.7552534006536)\")\n| EVAL x = ST_X(point), y = ST_Y(point)" ] } diff --git a/docs/reference/esql/functions/kibana/definition/st_y.json b/docs/reference/esql/functions/kibana/definition/st_y.json index 0dacaa56bb8de..2966ae04f75e4 100644 --- a/docs/reference/esql/functions/kibana/definition/st_y.json +++ b/docs/reference/esql/functions/kibana/definition/st_y.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "eval", "name" : "st_y", - "description" : "Extracts the y-coordinate from a point geometry.", + "description" : "Extracts the `y` coordinate from the supplied point.\nIf the points is of type `geo_point` this is equivalent to extracting the `latitude` value.", "signatures" : [ { "params" : [ @@ -10,7 +10,7 @@ "name" : "point", "type" : "cartesian_point", "optional" : false, - "description" : "" + "description" : "Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`." 
} ], "variadic" : false, @@ -22,11 +22,14 @@ "name" : "point", "type" : "geo_point", "optional" : false, - "description" : "" + "description" : "Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`." } ], "variadic" : false, "returnType" : "double" } + ], + "examples" : [ + "ROW point = TO_GEOPOINT(\"POINT(42.97109629958868 14.7552534006536)\")\n| EVAL x = ST_X(point), y = ST_Y(point)" ] } diff --git a/docs/reference/esql/functions/kibana/docs/case.md b/docs/reference/esql/functions/kibana/docs/case.md index e1494a5c2af8c..8bb31ee972759 100644 --- a/docs/reference/esql/functions/kibana/docs/case.md +++ b/docs/reference/esql/functions/kibana/docs/case.md @@ -3,6 +3,18 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### CASE -Accepts pairs of conditions and values. -The function returns the value that belongs to the first condition that evaluates to true. +Accepts pairs of conditions and values. The function returns the value that +belongs to the first condition that evaluates to `true`. +If the number of arguments is odd, the last argument is the default value which +is returned when no condition matches. If the number of arguments is even, and +no condition matches, the function returns `null`. + +``` +FROM employees +| EVAL type = CASE( + languages <= 1, "monolingual", + languages <= 2, "bilingual", + "polyglot") +| KEEP emp_no, languages, type +``` diff --git a/docs/reference/esql/functions/kibana/docs/greatest.md b/docs/reference/esql/functions/kibana/docs/greatest.md index 3db0c9ed87aa5..4b3b4027381f8 100644 --- a/docs/reference/esql/functions/kibana/docs/greatest.md +++ b/docs/reference/esql/functions/kibana/docs/greatest.md @@ -3,5 +3,11 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### GREATEST -Returns the maximum value from many columns. +Returns the maximum value from multiple columns. 
This is similar to <> +except it is intended to run on multiple columns at once. +``` +ROW a = 10, b = 20 +| EVAL g = GREATEST(a, b) +``` +Note: When run on `keyword` or `text` fields, this returns the last string in alphabetical order. When run on `boolean` columns this will return `true` if any values are `true`. diff --git a/docs/reference/esql/functions/kibana/docs/least.md b/docs/reference/esql/functions/kibana/docs/least.md index ff2c19592c8e1..7bbbcf79bc374 100644 --- a/docs/reference/esql/functions/kibana/docs/least.md +++ b/docs/reference/esql/functions/kibana/docs/least.md @@ -3,5 +3,9 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### LEAST -Returns the minimum value from many columns. +Returns the minimum value from multiple columns. This is similar to <> except it is intended to run on multiple columns at once. +``` +ROW a = 10, b = 20 +| EVAL l = LEAST(a, b) +``` diff --git a/docs/reference/esql/functions/kibana/docs/now.md b/docs/reference/esql/functions/kibana/docs/now.md new file mode 100644 index 0000000000000..5143dc843ebd8 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/now.md @@ -0,0 +1,10 @@ + + +### NOW +Returns current date and time. + +``` +ROW current_date = NOW() +``` diff --git a/docs/reference/esql/functions/kibana/docs/st_contains.md b/docs/reference/esql/functions/kibana/docs/st_contains.md index 6e23bb9b0f116..99f3a19f9df41 100644 --- a/docs/reference/esql/functions/kibana/docs/st_contains.md +++ b/docs/reference/esql/functions/kibana/docs/st_contains.md @@ -4,6 +4,7 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ ### ST_CONTAINS Returns whether the first geometry contains the second geometry. +This is the inverse of the <> function. 
``` FROM airport_city_boundaries diff --git a/docs/reference/esql/functions/kibana/docs/st_disjoint.md b/docs/reference/esql/functions/kibana/docs/st_disjoint.md index 7cf66b168bd70..4b42954efa5c1 100644 --- a/docs/reference/esql/functions/kibana/docs/st_disjoint.md +++ b/docs/reference/esql/functions/kibana/docs/st_disjoint.md @@ -4,6 +4,8 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ ### ST_DISJOINT Returns whether the two geometries or geometry columns are disjoint. +This is the inverse of the <> function. +In mathematical terms: ST_Disjoint(A, B) ⇔ A ⋂ B = ∅ ``` FROM airport_city_boundaries diff --git a/docs/reference/esql/functions/kibana/docs/st_intersects.md b/docs/reference/esql/functions/kibana/docs/st_intersects.md index e4db33429dbe3..b0a58b3ab2357 100644 --- a/docs/reference/esql/functions/kibana/docs/st_intersects.md +++ b/docs/reference/esql/functions/kibana/docs/st_intersects.md @@ -3,7 +3,11 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### ST_INTERSECTS -Returns whether the two geometries or geometry columns intersect. +Returns true if two geometries intersect. +They intersect if they have any point in common, including their interior points +(points along lines or within polygons). +This is the inverse of the <> function. +In mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅ ``` FROM airports diff --git a/docs/reference/esql/functions/kibana/docs/st_within.md b/docs/reference/esql/functions/kibana/docs/st_within.md index cbb3ae5ee9aca..9ef046e5006f6 100644 --- a/docs/reference/esql/functions/kibana/docs/st_within.md +++ b/docs/reference/esql/functions/kibana/docs/st_within.md @@ -4,6 +4,7 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ ### ST_WITHIN Returns whether the first geometry is within the second geometry. +This is the inverse of the <> function. 
``` FROM airport_city_boundaries diff --git a/docs/reference/esql/functions/kibana/docs/st_x.md b/docs/reference/esql/functions/kibana/docs/st_x.md index af2f4de1487cd..b113f19e1c76c 100644 --- a/docs/reference/esql/functions/kibana/docs/st_x.md +++ b/docs/reference/esql/functions/kibana/docs/st_x.md @@ -3,5 +3,10 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### ST_X -Extracts the x-coordinate from a point geometry. +Extracts the `x` coordinate from the supplied point. +If the points is of type `geo_point` this is equivalent to extracting the `longitude` value. +``` +ROW point = TO_GEOPOINT("POINT(42.97109629958868 14.7552534006536)") +| EVAL x = ST_X(point), y = ST_Y(point) +``` diff --git a/docs/reference/esql/functions/kibana/docs/st_y.md b/docs/reference/esql/functions/kibana/docs/st_y.md index 575a5bd3c7d33..db88c3ada63bb 100644 --- a/docs/reference/esql/functions/kibana/docs/st_y.md +++ b/docs/reference/esql/functions/kibana/docs/st_y.md @@ -3,5 +3,10 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### ST_Y -Extracts the y-coordinate from a point geometry. +Extracts the `y` coordinate from the supplied point. +If the points is of type `geo_point` this is equivalent to extracting the `latitude` value. 
+``` +ROW point = TO_GEOPOINT("POINT(42.97109629958868 14.7552534006536)") +| EVAL x = ST_X(point), y = ST_Y(point) +``` diff --git a/docs/reference/esql/functions/layout/case.asciidoc b/docs/reference/esql/functions/layout/case.asciidoc index 192e74522b8d3..edfc768dc7055 100644 --- a/docs/reference/esql/functions/layout/case.asciidoc +++ b/docs/reference/esql/functions/layout/case.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/case.svg[Embedded,opts=inline] include::../parameters/case.asciidoc[] include::../description/case.asciidoc[] include::../types/case.asciidoc[] +include::../examples/case.asciidoc[] diff --git a/docs/reference/esql/functions/layout/greatest.asciidoc b/docs/reference/esql/functions/layout/greatest.asciidoc index 1ff17f3c3adfe..fff9a32412947 100644 --- a/docs/reference/esql/functions/layout/greatest.asciidoc +++ b/docs/reference/esql/functions/layout/greatest.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/greatest.svg[Embedded,opts=inline] include::../parameters/greatest.asciidoc[] include::../description/greatest.asciidoc[] include::../types/greatest.asciidoc[] +include::../examples/greatest.asciidoc[] diff --git a/docs/reference/esql/functions/layout/least.asciidoc b/docs/reference/esql/functions/layout/least.asciidoc index a14a166c8bfe4..0daee9c181a65 100644 --- a/docs/reference/esql/functions/layout/least.asciidoc +++ b/docs/reference/esql/functions/layout/least.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/least.svg[Embedded,opts=inline] include::../parameters/least.asciidoc[] include::../description/least.asciidoc[] include::../types/least.asciidoc[] +include::../examples/least.asciidoc[] diff --git a/docs/reference/esql/functions/layout/now.asciidoc b/docs/reference/esql/functions/layout/now.asciidoc new file mode 100644 index 0000000000000..52341c1665619 --- /dev/null +++ b/docs/reference/esql/functions/layout/now.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. 
Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-now]] +=== `NOW` + +*Syntax* + +[.text-center] +image::esql/functions/signature/now.svg[Embedded,opts=inline] + +include::../parameters/now.asciidoc[] +include::../description/now.asciidoc[] +include::../types/now.asciidoc[] +include::../examples/now.asciidoc[] diff --git a/docs/reference/esql/functions/layout/st_x.asciidoc b/docs/reference/esql/functions/layout/st_x.asciidoc index ce3824aa157b1..2c2dc191a31a4 100644 --- a/docs/reference/esql/functions/layout/st_x.asciidoc +++ b/docs/reference/esql/functions/layout/st_x.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/st_x.svg[Embedded,opts=inline] include::../parameters/st_x.asciidoc[] include::../description/st_x.asciidoc[] include::../types/st_x.asciidoc[] +include::../examples/st_x.asciidoc[] diff --git a/docs/reference/esql/functions/layout/st_y.asciidoc b/docs/reference/esql/functions/layout/st_y.asciidoc index 702e9097ae689..0708465760bb3 100644 --- a/docs/reference/esql/functions/layout/st_y.asciidoc +++ b/docs/reference/esql/functions/layout/st_y.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/st_y.svg[Embedded,opts=inline] include::../parameters/st_y.asciidoc[] include::../description/st_y.asciidoc[] include::../types/st_y.asciidoc[] +include::../examples/st_y.asciidoc[] diff --git a/docs/reference/esql/functions/least.asciidoc b/docs/reference/esql/functions/least.asciidoc deleted file mode 100644 index 2860eb31090c4..0000000000000 --- a/docs/reference/esql/functions/least.asciidoc +++ /dev/null @@ -1,38 +0,0 @@ -[discrete] -[[esql-least]] -=== `LEAST` - -*Syntax* - -[.text-center] -image::esql/functions/signature/least.svg[Embedded,opts=inline] - -*Parameters* - -`first`:: -First of the columns to evaluate. - -`rest`:: -The rest of the columns to evaluate. - -*Description* - -Returns the minimum value from multiple columns. This is similar to -<> except it is intended to run on multiple columns at once. 
- -NOTE: When run on `keyword` or `text` fields, this returns the first string - in alphabetical order. When run on `boolean` columns this will return - `false` if any values are `false`. - -include::types/least.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/math.csv-spec[tag=least] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/math.csv-spec[tag=least-result] -|=== diff --git a/docs/reference/esql/functions/now.asciidoc b/docs/reference/esql/functions/now.asciidoc deleted file mode 100644 index 3c46f557acd1f..0000000000000 --- a/docs/reference/esql/functions/now.asciidoc +++ /dev/null @@ -1,28 +0,0 @@ -[discrete] -[[esql-now]] -=== `NOW` - -*Syntax* - -[source,esql] ----- -NOW() ----- - -*Description* - -Returns current date and time. - -*Example* - -[source,esql] ----- -include::{esql-specs}/date.csv-spec[tag=docsNow] ----- - -To retrieve logs from the last hour: - -[source,esql] ----- -include::{esql-specs}/date.csv-spec[tag=docsNowWhere] ----- \ No newline at end of file diff --git a/docs/reference/esql/functions/parameters/case.asciidoc b/docs/reference/esql/functions/parameters/case.asciidoc index c3617b7c0e32c..ee6f7e499b3b3 100644 --- a/docs/reference/esql/functions/parameters/case.asciidoc +++ b/docs/reference/esql/functions/parameters/case.asciidoc @@ -3,7 +3,7 @@ *Parameters* `condition`:: - +A condition. `trueValue`:: - +The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches. diff --git a/docs/reference/esql/functions/parameters/coalesce.asciidoc b/docs/reference/esql/functions/parameters/coalesce.asciidoc index 9b62a2e7e0d87..e0860c5bc3030 100644 --- a/docs/reference/esql/functions/parameters/coalesce.asciidoc +++ b/docs/reference/esql/functions/parameters/coalesce.asciidoc @@ -3,7 +3,7 @@ *Parameters* `first`:: -Expression to evaluate +Expression to evaluate. 
`rest`:: -Other expression to evaluate +Other expression to evaluate. diff --git a/docs/reference/esql/functions/parameters/greatest.asciidoc b/docs/reference/esql/functions/parameters/greatest.asciidoc index 83ac29d0bf7c9..8d23101aba7f3 100644 --- a/docs/reference/esql/functions/parameters/greatest.asciidoc +++ b/docs/reference/esql/functions/parameters/greatest.asciidoc @@ -3,7 +3,7 @@ *Parameters* `first`:: - +First of the columns to evaluate. `rest`:: - +The rest of the columns to evaluate. diff --git a/docs/reference/esql/functions/parameters/least.asciidoc b/docs/reference/esql/functions/parameters/least.asciidoc index 83ac29d0bf7c9..8d23101aba7f3 100644 --- a/docs/reference/esql/functions/parameters/least.asciidoc +++ b/docs/reference/esql/functions/parameters/least.asciidoc @@ -3,7 +3,7 @@ *Parameters* `first`:: - +First of the columns to evaluate. `rest`:: - +The rest of the columns to evaluate. diff --git a/docs/reference/esql/functions/parameters/now.asciidoc b/docs/reference/esql/functions/parameters/now.asciidoc new file mode 100644 index 0000000000000..25b3c973f1a26 --- /dev/null +++ b/docs/reference/esql/functions/parameters/now.asciidoc @@ -0,0 +1,3 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Parameters* diff --git a/docs/reference/esql/functions/parameters/st_contains.asciidoc b/docs/reference/esql/functions/parameters/st_contains.asciidoc index e87a0d0eb94f0..2f969f0f3cf05 100644 --- a/docs/reference/esql/functions/parameters/st_contains.asciidoc +++ b/docs/reference/esql/functions/parameters/st_contains.asciidoc @@ -3,7 +3,7 @@ *Parameters* `geomA`:: -Geometry column name or variable of geometry type +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. `geomB`:: -Geometry column name or variable of geometry type +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. 
If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. diff --git a/docs/reference/esql/functions/parameters/st_disjoint.asciidoc b/docs/reference/esql/functions/parameters/st_disjoint.asciidoc index e87a0d0eb94f0..2f969f0f3cf05 100644 --- a/docs/reference/esql/functions/parameters/st_disjoint.asciidoc +++ b/docs/reference/esql/functions/parameters/st_disjoint.asciidoc @@ -3,7 +3,7 @@ *Parameters* `geomA`:: -Geometry column name or variable of geometry type +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. `geomB`:: -Geometry column name or variable of geometry type +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. diff --git a/docs/reference/esql/functions/parameters/st_intersects.asciidoc b/docs/reference/esql/functions/parameters/st_intersects.asciidoc index e87a0d0eb94f0..2f969f0f3cf05 100644 --- a/docs/reference/esql/functions/parameters/st_intersects.asciidoc +++ b/docs/reference/esql/functions/parameters/st_intersects.asciidoc @@ -3,7 +3,7 @@ *Parameters* `geomA`:: -Geometry column name or variable of geometry type +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. `geomB`:: -Geometry column name or variable of geometry type +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. 
diff --git a/docs/reference/esql/functions/parameters/st_within.asciidoc b/docs/reference/esql/functions/parameters/st_within.asciidoc index e87a0d0eb94f0..2f969f0f3cf05 100644 --- a/docs/reference/esql/functions/parameters/st_within.asciidoc +++ b/docs/reference/esql/functions/parameters/st_within.asciidoc @@ -3,7 +3,7 @@ *Parameters* `geomA`:: -Geometry column name or variable of geometry type +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. `geomB`:: -Geometry column name or variable of geometry type +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. diff --git a/docs/reference/esql/functions/parameters/st_x.asciidoc b/docs/reference/esql/functions/parameters/st_x.asciidoc index 4e8e77dea1f86..b66bfc286a443 100644 --- a/docs/reference/esql/functions/parameters/st_x.asciidoc +++ b/docs/reference/esql/functions/parameters/st_x.asciidoc @@ -3,4 +3,4 @@ *Parameters* `point`:: - +Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`. diff --git a/docs/reference/esql/functions/parameters/st_y.asciidoc b/docs/reference/esql/functions/parameters/st_y.asciidoc index 4e8e77dea1f86..b66bfc286a443 100644 --- a/docs/reference/esql/functions/parameters/st_y.asciidoc +++ b/docs/reference/esql/functions/parameters/st_y.asciidoc @@ -3,4 +3,4 @@ *Parameters* `point`:: - +Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`. 
diff --git a/docs/reference/esql/functions/signature/now.svg b/docs/reference/esql/functions/signature/now.svg new file mode 100644 index 0000000000000..2cd48ac561408 --- /dev/null +++ b/docs/reference/esql/functions/signature/now.svg @@ -0,0 +1 @@ +NOW() \ No newline at end of file diff --git a/docs/reference/esql/functions/spatial-functions.asciidoc b/docs/reference/esql/functions/spatial-functions.asciidoc index b6d178ddd624d..d143681fcf2f2 100644 --- a/docs/reference/esql/functions/spatial-functions.asciidoc +++ b/docs/reference/esql/functions/spatial-functions.asciidoc @@ -16,9 +16,9 @@ * experimental:[] <> // end::spatial_list[] -include::st_intersects.asciidoc[] -include::st_disjoint.asciidoc[] -include::st_contains.asciidoc[] -include::st_within.asciidoc[] -include::st_x.asciidoc[] -include::st_y.asciidoc[] +include::layout/st_intersects.asciidoc[] +include::layout/st_disjoint.asciidoc[] +include::layout/st_contains.asciidoc[] +include::layout/st_within.asciidoc[] +include::layout/st_x.asciidoc[] +include::layout/st_y.asciidoc[] diff --git a/docs/reference/esql/functions/st_contains.asciidoc b/docs/reference/esql/functions/st_contains.asciidoc deleted file mode 100644 index 110c4fe4ca9ec..0000000000000 --- a/docs/reference/esql/functions/st_contains.asciidoc +++ /dev/null @@ -1,26 +0,0 @@ -[discrete] -[[esql-st_contains]] -=== `ST_CONTAINS` - -experimental::[] - -*Syntax* - -[.text-center] -image::esql/functions/signature/st_contains.svg[Embedded,opts=inline] - -*Parameters* - -`geomA`:: -Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. - -`geomB`:: -Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. -The second parameter must also have the same coordinate system as the first. -This means it is not possible to combine `geo_*` and `cartesian_*` parameters. 
- -include::description/st_contains.asciidoc[] -This is the inverse of the <> function. - -include::types/st_contains.asciidoc[] -include::examples/st_contains.asciidoc[] diff --git a/docs/reference/esql/functions/st_disjoint.asciidoc b/docs/reference/esql/functions/st_disjoint.asciidoc deleted file mode 100644 index db89ca186a0ff..0000000000000 --- a/docs/reference/esql/functions/st_disjoint.asciidoc +++ /dev/null @@ -1,27 +0,0 @@ -[discrete] -[[esql-st_disjoint]] -=== `ST_DISJOINT` - -experimental::[] - -*Syntax* - -[.text-center] -image::esql/functions/signature/st_disjoint.svg[Embedded,opts=inline] - -*Parameters* - -`geomA`:: -Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. - -`geomB`:: -Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. -The second parameter must also have the same coordinate system as the first. -This means it is not possible to combine `geo_*` and `cartesian_*` parameters. - -include::description/st_disjoint.asciidoc[] -This is the inverse of the <> function. -In mathematical terms: ST_Disjoint(A, B) ⇔ A ⋂ B = ∅ - -include::types/st_disjoint.asciidoc[] -include::examples/st_disjoint.asciidoc[] diff --git a/docs/reference/esql/functions/st_intersects.asciidoc b/docs/reference/esql/functions/st_intersects.asciidoc deleted file mode 100644 index d75a7f3a50e0f..0000000000000 --- a/docs/reference/esql/functions/st_intersects.asciidoc +++ /dev/null @@ -1,31 +0,0 @@ -[discrete] -[[esql-st_intersects]] -=== `ST_INTERSECTS` - -experimental::[] - -*Syntax* - -[.text-center] -image::esql/functions/signature/st_intersects.svg[Embedded,opts=inline] - -*Parameters* - -`geomA`:: -Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. - -`geomB`:: -Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. 
If `null`, the function returns `null`. -The second parameter must also have the same coordinate system as the first. -This means it is not possible to combine `geo_*` and `cartesian_*` parameters. - -*Description* - -Returns true if two geometries intersect. -They intersect if they have any point in common, including their interior points -(points along lines or within polygons). -This is the inverse of the <> function. -In mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅ - -include::types/st_intersects.asciidoc[] -include::examples/st_intersects.asciidoc[] diff --git a/docs/reference/esql/functions/st_within.asciidoc b/docs/reference/esql/functions/st_within.asciidoc deleted file mode 100644 index 0f0190a9de638..0000000000000 --- a/docs/reference/esql/functions/st_within.asciidoc +++ /dev/null @@ -1,26 +0,0 @@ -[discrete] -[[esql-st_within]] -=== `ST_WITHIN` - -experimental::[] - -*Syntax* - -[.text-center] -image::esql/functions/signature/st_within.svg[Embedded,opts=inline] - -*Parameters* - -`geomA`:: -Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. - -`geomB`:: -Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. -The second parameter must also have the same coordinate system as the first. -This means it is not possible to combine `geo_*` and `cartesian_*` parameters. - -include::description/st_within.asciidoc[] -This is the inverse of the <> function. 
- -include::types/st_within.asciidoc[] -include::examples/st_within.asciidoc[] diff --git a/docs/reference/esql/functions/st_x.asciidoc b/docs/reference/esql/functions/st_x.asciidoc deleted file mode 100644 index eec48894b5150..0000000000000 --- a/docs/reference/esql/functions/st_x.asciidoc +++ /dev/null @@ -1,33 +0,0 @@ -[discrete] -[[esql-st_x]] -=== `ST_X` - -experimental::[] - -*Syntax* - -[.text-center] -image::esql/functions/signature/st_x.svg[Embedded,opts=inline] - -*Parameters* - -`point`:: -Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`. - -*Description* - -Extracts the `x` coordinate from the supplied point. -If the points is of type `geo_point` this is equivalent to extracting the `longitude` value. - -include::types/st_x.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/spatial.csv-spec[tag=st_x_y] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/spatial.csv-spec[tag=st_x_y-result] -|=== diff --git a/docs/reference/esql/functions/st_y.asciidoc b/docs/reference/esql/functions/st_y.asciidoc deleted file mode 100644 index 8fc7281e395d2..0000000000000 --- a/docs/reference/esql/functions/st_y.asciidoc +++ /dev/null @@ -1,33 +0,0 @@ -[discrete] -[[esql-st_y]] -=== `ST_Y` - -experimental::[] - -*Syntax* - -[.text-center] -image::esql/functions/signature/st_y.svg[Embedded,opts=inline] - -*Parameters* - -`point`:: -Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`. - -*Description* - -Extracts the `y` coordinate from the supplied point. -If the points is of type `geo_point` this is equivalent to extracting the `latitude` value. 
- -include::types/st_y.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/spatial.csv-spec[tag=st_x_y] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/spatial.csv-spec[tag=st_x_y-result] -|=== diff --git a/docs/reference/esql/functions/types/case.asciidoc b/docs/reference/esql/functions/types/case.asciidoc index e7d627ab915a1..85e4193b5bf2f 100644 --- a/docs/reference/esql/functions/types/case.asciidoc +++ b/docs/reference/esql/functions/types/case.asciidoc @@ -5,5 +5,15 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== condition | trueValue | result -keyword +boolean | boolean | boolean +boolean | cartesian_point | cartesian_point +boolean | datetime | datetime +boolean | double | double +boolean | geo_point | geo_point +boolean | integer | integer +boolean | ip | ip +boolean | long | long +boolean | text | text +boolean | unsigned_long | unsigned_long +boolean | version | version |=== diff --git a/docs/reference/esql/functions/types/now.asciidoc b/docs/reference/esql/functions/types/now.asciidoc new file mode 100644 index 0000000000000..5737d98f2f7db --- /dev/null +++ b/docs/reference/esql/functions/types/now.asciidoc @@ -0,0 +1,9 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +result +datetime +|=== diff --git a/docs/reference/esql/index.asciidoc b/docs/reference/esql/index.asciidoc index 5cb02064dc794..54627a6de3c62 100644 --- a/docs/reference/esql/index.asciidoc +++ b/docs/reference/esql/index.asciidoc @@ -6,8 +6,6 @@ [partintro] -preview::["Do not use {esql} on production environments. This functionality is in technical preview and may be changed or removed in a future release. 
Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features."] - The {es} Query Language ({esql}) provides a powerful way to filter, transform, and analyze data stored in {es}, and in the future in other runtimes. It is designed to be easy to learn and use, by end users, SRE teams, application diff --git a/docs/reference/esql/processing-commands/enrich.asciidoc b/docs/reference/esql/processing-commands/enrich.asciidoc index f73eea6018cbc..5470d81b2f40b 100644 --- a/docs/reference/esql/processing-commands/enrich.asciidoc +++ b/docs/reference/esql/processing-commands/enrich.asciidoc @@ -57,11 +57,11 @@ in this example). `ENRICH` will look for records in the [source.merge.styled,esql] ---- -include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich] +include::{esql-specs}/enrich.csv-spec[tag=enrich] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich-result] +include::{esql-specs}/enrich.csv-spec[tag=enrich-result] |=== To use a column with a different name than the `match_field` defined in the @@ -69,11 +69,11 @@ policy as the match field, use `ON `: [source.merge.styled,esql] ---- -include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich_on] +include::{esql-specs}/enrich.csv-spec[tag=enrich_on] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich_on-result] +include::{esql-specs}/enrich.csv-spec[tag=enrich_on-result] |=== By default, each of the enrich fields defined in the policy is added as a @@ -82,22 +82,22 @@ column. 
To explicitly select the enrich fields that are added, use [source.merge.styled,esql] ---- -include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich_with] +include::{esql-specs}/enrich.csv-spec[tag=enrich_with] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich_with-result] +include::{esql-specs}/enrich.csv-spec[tag=enrich_with-result] |=== You can rename the columns that are added using `WITH new_name=`: [source.merge.styled,esql] ---- -include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich_rename] +include::{esql-specs}/enrich.csv-spec[tag=enrich_rename] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich_rename-result] +include::{esql-specs}/enrich.csv-spec[tag=enrich_rename-result] |=== In case of name collisions, the newly created columns will override existing diff --git a/docs/reference/esql/processing-commands/mv_expand.asciidoc b/docs/reference/esql/processing-commands/mv_expand.asciidoc index 46dc4fd0a33cf..9e1cb5573c381 100644 --- a/docs/reference/esql/processing-commands/mv_expand.asciidoc +++ b/docs/reference/esql/processing-commands/mv_expand.asciidoc @@ -2,6 +2,8 @@ [[esql-mv_expand]] === `MV_EXPAND` +preview::[] + **Syntax** [source,esql] diff --git a/docs/reference/high-availability/cluster-design.asciidoc b/docs/reference/high-availability/cluster-design.asciidoc index 3f8e19b47d37a..6c17a494f36ae 100644 --- a/docs/reference/high-availability/cluster-design.asciidoc +++ b/docs/reference/high-availability/cluster-design.asciidoc @@ -7,14 +7,14 @@ nodes to take over their responsibilities, an {es} cluster can continue operating normally if some of its nodes are unavailable or disconnected. There is a limit to how small a resilient cluster can be. All {es} clusters -require: +require the following components to function: -- One <> node -- At least one node for each <>. 
-- At least one copy of every <>. +- One <> +- At least one node for each <> +- At least one copy of every <> A resilient cluster requires redundancy for every required cluster component. -This means a resilient cluster must have: +This means a resilient cluster must have the following components: - At least three master-eligible nodes - At least two nodes of each role @@ -375,11 +375,11 @@ The cluster will be resilient to the loss of any zone as long as: - There are at least two zones containing data nodes. - Every index that is not a <> has at least one replica of each shard, in addition to the primary. -- Shard allocation awareness is configured to avoid concentrating all copies of - a shard within a single zone. +- <> is configured to + avoid concentrating all copies of a shard within a single zone. - The cluster has at least three master-eligible nodes. At least two of these - nodes are not voting-only master-eligible nodes, and they are spread evenly - across at least three zones. + nodes are not <>, + and they are spread evenly across at least three zones. - Clients are configured to send their requests to nodes in more than one zone or are configured to use a load balancer that balances the requests across an appropriate set of nodes. 
The {ess-trial}[Elastic Cloud] service provides such diff --git a/docs/reference/images/shard-allocation/shard-allocation-awareness-one-rack.png b/docs/reference/images/shard-allocation/shard-allocation-awareness-one-rack.png new file mode 100644 index 0000000000000..d5a3040cc5343 Binary files /dev/null and b/docs/reference/images/shard-allocation/shard-allocation-awareness-one-rack.png differ diff --git a/docs/reference/images/shard-allocation/shard-allocation-awareness-two-racks.png b/docs/reference/images/shard-allocation/shard-allocation-awareness-two-racks.png new file mode 100644 index 0000000000000..ce2ce6b2a95e9 Binary files /dev/null and b/docs/reference/images/shard-allocation/shard-allocation-awareness-two-racks.png differ diff --git a/docs/reference/index.asciidoc b/docs/reference/index.asciidoc index e47304f1e1337..2057519719177 100644 --- a/docs/reference/index.asciidoc +++ b/docs/reference/index.asciidoc @@ -10,7 +10,7 @@ include::intro.asciidoc[] include::release-notes/highlights.asciidoc[] -include::getting-started.asciidoc[] +include::quickstart/index.asciidoc[] include::setup.asciidoc[] diff --git a/docs/reference/ml/anomaly-detection/ml-configuring-alerts.asciidoc b/docs/reference/ml/anomaly-detection/ml-configuring-alerts.asciidoc index 2e678b929d296..89eb6e8559056 100644 --- a/docs/reference/ml/anomaly-detection/ml-configuring-alerts.asciidoc +++ b/docs/reference/ml/anomaly-detection/ml-configuring-alerts.asciidoc @@ -27,12 +27,7 @@ TIP: If you have created rules for specific {anomaly-jobs} and you want to monitor whether these jobs work as expected, {anomaly-jobs} health rules are ideal for this purpose. -In *{stack-manage-app} > {rules-ui}*, you can create both types of {ml} rules: - -[role="screenshot"] -image::images/ml-rule.png["Creating a new machine learning rule",500] -// NOTE: This is an autogenerated screenshot. Do not edit it directly. - +In *{stack-manage-app} > {rules-ui}*, you can create both types of {ml} rules. 
In the *{ml-app}* app, you can create only {anomaly-detect} alert rules; create them from the {anomaly-job} wizard after you start the job or from the {anomaly-job} list. diff --git a/docs/reference/ml/images/ml-rule.png b/docs/reference/ml/images/ml-rule.png deleted file mode 100644 index f7ebcb3716b81..0000000000000 Binary files a/docs/reference/ml/images/ml-rule.png and /dev/null differ diff --git a/docs/reference/modules/cluster/allocation_awareness.asciidoc b/docs/reference/modules/cluster/allocation_awareness.asciidoc index d447026fae293..9c6197f9ba40d 100644 --- a/docs/reference/modules/cluster/allocation_awareness.asciidoc +++ b/docs/reference/modules/cluster/allocation_awareness.asciidoc @@ -5,7 +5,7 @@ You can use custom node attributes as _awareness attributes_ to enable {es} to take your physical hardware configuration into account when allocating shards. If {es} knows which nodes are on the same physical server, in the same rack, or in the same zone, it can distribute the primary shard and its replica shards to -minimise the risk of losing all shard copies in the event of a failure. +minimize the risk of losing all shard copies in the event of a failure. When shard allocation awareness is enabled with the <> @@ -19,22 +19,27 @@ allocated in each location. If the number of nodes in each location is unbalanced and there are a lot of replicas, replica shards might be left unassigned. +TIP: Learn more about <>. + [[enabling-awareness]] ===== Enabling shard allocation awareness To enable shard allocation awareness: -. Specify the location of each node with a custom node attribute. For example, -if you want Elasticsearch to distribute shards across different racks, you might -set an awareness attribute called `rack_id` in each node's `elasticsearch.yml` -config file. +. Specify the location of each node with a custom node attribute. 
For example,
+if you want Elasticsearch to distribute shards across different racks, you might
+use an awareness attribute called `rack_id`.
++
+You can set custom attributes in two ways:
+
+- By editing the `elasticsearch.yml` config file:
 +
 [source,yaml]
 --------------------------------------------------------
 node.attr.rack_id: rack_one
 --------------------------------------------------------
 +
-You can also set custom attributes when you start a node:
+- Using the `-E` command line argument when you start a node:
 +
 [source,sh]
 --------------------------------------------------------
@@ -56,17 +61,33 @@ cluster.routing.allocation.awareness.attributes: rack_id <1>
 +
 You can also use the <> API to set or update
-a cluster's awareness attributes.
+a cluster's awareness attributes:
++
+[source,console]
+--------------------------------------------------
+PUT /_cluster/settings
+{
+  "persistent" : {
+    "cluster.routing.allocation.awareness.attributes" : "rack_id"
+  }
+}
+--------------------------------------------------
 
 With this example configuration, if you start two nodes with
 `node.attr.rack_id` set to `rack_one` and create an index with 5 primary
 shards and 1 replica of each primary, all primaries and replicas are
-allocated across the two nodes.
+allocated across the two nodes.
+
+.All primaries and replicas allocated across two nodes in the same rack
+image::images/shard-allocation/shard-allocation-awareness-one-rack.png[All primaries and replicas are allocated across two nodes in the same rack]
 
 If you add two nodes with `node.attr.rack_id` set to `rack_two`, {es}
 moves shards to the new nodes, ensuring (if possible) that no two copies of
 the same shard are in the same rack.
+.Primaries and replicas allocated across four nodes in two racks, with no two copies of the same shard in the same rack +image::images/shard-allocation/shard-allocation-awareness-two-racks.png[Primaries and replicas are allocated across four nodes in two racks with no two copies of the same shard in the same rack] + If `rack_two` fails and takes down both its nodes, by default {es} allocates the lost shard copies to nodes in `rack_one`. To prevent multiple copies of a particular shard from being allocated in the same location, you can diff --git a/docs/reference/modules/cluster/remote-clusters-settings.asciidoc b/docs/reference/modules/cluster/remote-clusters-settings.asciidoc index 848a29c64279c..2308ec259da48 100644 --- a/docs/reference/modules/cluster/remote-clusters-settings.asciidoc +++ b/docs/reference/modules/cluster/remote-clusters-settings.asciidoc @@ -59,35 +59,40 @@ you configure the remotes. `cluster.remote..transport.compress`:: - Per cluster setting that enables you to configure compression for requests - to a specific remote cluster. This setting impacts only requests - sent to the remote cluster. If the inbound request is compressed, - Elasticsearch compresses the response. The setting options are `true`, - `indexing_data`, and `false`. If unset, the global `transport.compress` is - used as the fallback setting. + Per-cluster setting that enables you to configure compression for requests to + a specific remote cluster. The handling cluster will automatically compress + responses to compressed requests. The setting options are `true`, + `indexing_data`, and `false`. If unset, defaults to the behaviour specified + by the node-wide `transport.compress` setting. See the + <> for further information. `cluster.remote..transport.compression_scheme`:: - Per cluster setting that enables you to configure compression scheme for - requests to a specific remote cluster. This setting impacts only requests - sent to the remote cluster. 
If an inbound request is compressed, {es}
-    compresses the response using the same compression scheme. The setting options
-    are `deflate` and `lz4`. If unset, the global `transport.compression_scheme`
-    is used as the fallback setting.
+    Per-cluster setting that enables you to configure the compression scheme for
+    requests to a specific cluster if those requests are selected to be
+    compressed by the `cluster.remote..transport.compress`
+    setting. The handling cluster will automatically use the same compression
+    scheme for responses as for the corresponding requests. The setting options
+    are `deflate` and `lz4`. If unset, defaults to the behaviour specified by the
+    node-wide `transport.compression_scheme` setting. See the
+    <> for further information.
-
-`cluster.remote..credentials` (<>, <>)::
 [[remote-cluster-credentials-setting]]
-
-    Per cluster setting for configuring <>.
-    This setting takes the encoded value of a
-    <> and must be set
-    in the <> on each node in the cluster.
-    The presence (or not) of this setting determines which model a remote cluster uses.
-    If present, the remote cluster uses the API key based model.
-    Otherwise, it uses the certificate based model.
-    If the setting is added, removed, or updated in the <> and reloaded via the
-    <> API, the cluster will automatically rebuild its connection to the remote.
+`cluster.remote..credentials`::
+
+    (<>, <>)
+    Per-cluster setting for configuring <>. This setting takes the encoded value of a
+    <> and must
+    be set in the <> on each node in the cluster.
+    The presence (or not) of this setting determines which model a remote cluster
+    uses. If present, the remote cluster uses the API key based model. Otherwise,
+    it uses the certificate based model. If the setting is added, removed, or
+    updated in the <> and reloaded via the
+    <> API, the cluster will automatically
+    rebuild its connection to the remote.
[[remote-cluster-sniff-settings]] ==== Sniff mode remote cluster settings diff --git a/docs/reference/modules/transport.asciidoc b/docs/reference/modules/transport.asciidoc index 2ec574544f9bb..d08da2cfc1d2f 100644 --- a/docs/reference/modules/transport.asciidoc +++ b/docs/reference/modules/transport.asciidoc @@ -47,20 +47,44 @@ different from `transport.port`. Defaults to the port assigned via The connect timeout for initiating a new connection (in time setting format). Defaults to `30s`. +[[transport-settings-compress]] `transport.compress`:: (<>, string) -Set to `true`, `indexing_data`, or `false` to configure transport compression -between nodes. The option `true` will compress all data. The option -`indexing_data` will compress only the raw index data sent between nodes during -ingest, ccr following (excluding bootstrap), and operations based shard recovery -(excluding transferring lucene files). Defaults to `indexing_data`. +Determines which transport requests are compressed before sending them to +another node. {es} will compress transport responses if and only if the +corresponding request was compressed. See also `transport.compression_scheme`, +which specifies the compression scheme which is used. Accepts the following +values: ++ +-- +`false`:: + +No transport requests are compressed. This option uses the most network +bandwidth, but avoids the CPU overhead of compression and decompression. + +`indexing_data`:: + +Compresses only the raw indexing data sent between nodes during ingest, CCR +following (excluding bootstrapping) and operations-based shard recovery +(excluding file-based recovery which copies the raw Lucene data). This option +is a good trade-off between network bandwidth savings and the extra CPU +required for compression and decompression. This option is the default. + +`true`:: + +All transport requests are compressed. 
This option may perform better than +`indexing_data` in terms of network bandwidth, but will require the most CPU +for compression and decompression work. +-- +[[transport-settings-compression-scheme]] `transport.compression_scheme`:: (<>, string) -Configures the compression scheme for `transport.compress`. The options are -`deflate` or `lz4`. If `lz4` is configured and the remote node has not been -upgraded to a version supporting `lz4`, the traffic will be sent uncompressed. -Defaults to `lz4`. +Configures the compression scheme for requests which are selected for +compression by to the `transport.compress` setting. Accepts either `deflate` or +`lz4`, which offer different trade-offs between compression ratio and CPU +usage. {es} will use the same compression scheme for responses as for the +corresponding requests. Defaults to `lz4`. `transport.tcp.keep_alive`:: (<>, boolean) diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/quickstart/getting-started.asciidoc similarity index 98% rename from docs/reference/getting-started.asciidoc rename to docs/reference/quickstart/getting-started.asciidoc index 2a5dbc2f0d031..6b3095e07f9d4 100644 --- a/docs/reference/getting-started.asciidoc +++ b/docs/reference/quickstart/getting-started.asciidoc @@ -1,10 +1,9 @@ -[chapter] [[getting-started]] -= Quick start +== Quick start guide This guide helps you learn how to: -* install and run {es} and {kib} (using {ecloud} or Docker), +* Run {es} and {kib} (using {ecloud} or in a local Docker dev environment), * add simple (non-timestamped) dataset to {es}, * run basic searches. diff --git a/docs/reference/quickstart/index.asciidoc b/docs/reference/quickstart/index.asciidoc new file mode 100644 index 0000000000000..e517d039e620b --- /dev/null +++ b/docs/reference/quickstart/index.asciidoc @@ -0,0 +1,10 @@ +[[quickstart]] += Quickstart + +Get started quickly with {es}. + +* Learn how to run {es} (and {kib}) for <>. +* Follow our <> to add data to {es} and query it. 
+ +include::run-elasticsearch-locally.asciidoc[] +include::getting-started.asciidoc[] diff --git a/docs/reference/quickstart/run-elasticsearch-locally.asciidoc b/docs/reference/quickstart/run-elasticsearch-locally.asciidoc new file mode 100644 index 0000000000000..0db395ba34b0a --- /dev/null +++ b/docs/reference/quickstart/run-elasticsearch-locally.asciidoc @@ -0,0 +1,175 @@ +[[run-elasticsearch-locally]] +== Run {es} locally in Docker (without security) +++++ +Local dev setup (Docker) +++++ + +[WARNING] +==== +*DO NOT USE THESE INSTRUCTIONS FOR PRODUCTION DEPLOYMENTS* + +The instructions on this page are for *local development only*. Do not use these instructions for production deployments, because they are not secure. +While this approach is convenient for experimenting and learning, you should never run the service in this way in a production environment. +==== + +The following commands help you very quickly spin up a single-node {es} cluster, together with {kib} in Docker. +Note that if you don't need the {kib} UI, you can skip those instructions. + +[discrete] +[[local-dev-why]] +=== When would I use this setup? + +Use this setup if you want to quickly spin up {es} (and {kib}) for local development or testing. + +For example you might: + +* Want to run a quick test to see how a feature works. +* Follow a tutorial or guide that requires an {es} cluster, like our <>. +* Experiment with the {es} APIs using different tools, like the Dev Tools Console, cURL, or an Elastic programming language client. +* Quickly spin up an {es} cluster to test an executable https://github.com/elastic/elasticsearch-labs/tree/main/notebooks#readme[Python notebook] locally. + +[discrete] +[[local-dev-prerequisites]] +=== Prerequisites + +If you don't have Docker installed, https://www.docker.com/products/docker-desktop[download and install Docker Desktop] for your operating system. 
+ +[discrete] +[[local-dev-env-vars]] +=== Set environment variables + +Configure the following environment variables. + +[source,sh] +---- +export ELASTIC_PASSWORD="" # password for "elastic" username +export KIBANA_PASSWORD="" # Used _internally_ by Kibana, must be at least 6 characters long +---- + +[discrete] +[[local-dev-create-docker-network]] +=== Create a Docker network + +To run both {es} and {kib}, you'll need to create a Docker network: + +[source,sh] +---- +docker network create elastic-net +---- + +[discrete] +[[local-dev-run-es]] +=== Run {es} + +Start the {es} container with the following command: + +ifeval::["{release-state}"=="unreleased"] +WARNING: Version {version} has not yet been released. +No Docker image is currently available for {es} {version}. +endif::[] + +[source,sh,subs="attributes"] +---- +docker run -p 127.0.0.1:9200:9200 -d --name elasticsearch --network elastic-net \ + -e ELASTIC_PASSWORD=$ELASTIC_PASSWORD \ + -e "discovery.type=single-node" \ + -e "xpack.security.http.ssl.enabled=false" \ + -e "xpack.license.self_generated.type=trial" \ + {docker-image} +---- + +[discrete] +[[local-dev-run-kib]] +=== Run {kib} (optional) + +To run {kib}, you must first set the `kibana_system` password in the {es} container. + +[source,sh,subs="attributes"] +---- +# configure the Kibana password in the ES container +curl -u elastic:$ELASTIC_PASSWORD \ + -X POST \ + http://localhost:9200/_security/user/kibana_system/_password \ + -d '{"password":"'"$KIBANA_PASSWORD"'"}' \ + -H 'Content-Type: application/json' +---- +// NOTCONSOLE + +Start the {kib} container with the following command: + +ifeval::["{release-state}"=="unreleased"] +WARNING: Version {version} has not yet been released. +No Docker image is currently available for {es} {version}. 
+endif::[] + +[source,sh,subs="attributes"] +---- +docker run -p 127.0.0.1:5601:5601 -d --name kibana --network elastic-net \ + -e ELASTICSEARCH_URL=http://elasticsearch:9200 \ + -e ELASTICSEARCH_HOSTS=http://elasticsearch:9200 \ + -e ELASTICSEARCH_USERNAME=kibana_system \ + -e ELASTICSEARCH_PASSWORD=$KIBANA_PASSWORD \ + -e "xpack.security.enabled=false" \ + -e "xpack.license.self_generated.type=trial" \ + {kib-docker-image} +---- + +[NOTE] +==== +The service is started with a trial license. The trial license enables all features of Elasticsearch for a trial period of 30 days. After the trial period expires, the license is downgraded to a basic license, which is free forever. If you prefer to skip the trial and use the basic license, set the value of the `xpack.license.self_generated.type` variable to basic instead. For a detailed feature comparison between the different licenses, refer to our https://www.elastic.co/subscriptions[subscriptions page]. +==== + +[discrete] +[[local-dev-connecting-clients]] +== Connecting to {es} with language clients + +To connect to the {es} cluster from a language client, you can use basic authentication with the `elastic` username and the password you set in the environment variable. 
+ +You'll use the following connection details: + +* **{es} endpoint**: `http://localhost:9200` +* **Username**: `elastic` +* **Password**: `$ELASTIC_PASSWORD` (Value you set in the environment variable) + +For example, to connect with the Python `elasticsearch` client: + +[source,python] +---- +import os +from elasticsearch import Elasticsearch + +username = 'elastic' +password = os.getenv('ELASTIC_PASSWORD') # Value you set in the environment variable + +client = Elasticsearch( + "http://localhost:9200", + basic_auth=(username, password) +) + +print(client.info()) +---- + +Here's an example curl command using basic authentication: + +[source,sh,subs="attributes"] +---- +curl -u elastic:$ELASTIC_PASSWORD \ + -X PUT \ + http://localhost:9200/my-new-index \ + -H 'Content-Type: application/json' +---- +// NOTCONSOLE + +[discrete] +[[local-dev-next-steps]] +=== Next steps + +Use our <> to learn the basics of {es}: how to add data and query it. + +[discrete] +[[local-dev-production]] +=== Moving to production + +This setup is not suitable for production use. For production deployments, we recommend using our managed service on Elastic Cloud. https://cloud.elastic.co/registration[Sign up for a free trial] (no credit card required). + +Otherwise, refer to https://www.elastic.co/guide/en/elasticsearch/reference/current/install-elasticsearch.html[Install {es}] to learn about the various options for installing {es} in a self-managed production environment, including using Docker. diff --git a/docs/reference/rest-api/common-parms.asciidoc b/docs/reference/rest-api/common-parms.asciidoc index dd264c0e5bcd2..a2a397c4efe65 100644 --- a/docs/reference/rest-api/common-parms.asciidoc +++ b/docs/reference/rest-api/common-parms.asciidoc @@ -1062,8 +1062,8 @@ end::stats[] tag::stored_fields[] `stored_fields`:: -(Optional, Boolean) If `true`, retrieves the document fields stored in the -index rather than the document `_source`. Defaults to `false`. 
+(Optional, string) +A comma-separated list of <> to include in the response. end::stored_fields[] tag::sync[] diff --git a/docs/reference/rest-api/usage.asciidoc b/docs/reference/rest-api/usage.asciidoc index 6bdfaab17a4d0..0d21f648ab58b 100644 --- a/docs/reference/rest-api/usage.asciidoc +++ b/docs/reference/rest-api/usage.asciidoc @@ -308,7 +308,8 @@ GET /_xpack/usage }, "rollup" : { "available" : true, - "enabled" : true + "enabled" : true, + ... }, "ilm" : { "policy_count" : 3, @@ -496,6 +497,7 @@ GET /_xpack/usage } ------------------------------------------------------------ // TESTRESPONSE[s/"security" : \{[^\}]*\},/"security" : $body.$_path,/] +// TESTRESPONSE[s/"rollup" : \{[^\}]*\},/"rollup" : $body.$_path,/] // TESTRESPONSE[s/"detectors" : \{[^\}]*\},/"detectors" : $body.$_path,/] // TESTRESPONSE[s/"model_size" : \{[^\}]*\},/"model_size" : $body.$_path,/] // TESTRESPONSE[s/"eql" : \{[^\}]*\},/"eql" : $body.$_path,/] diff --git a/docs/reference/search/search-your-data/cohere-es.asciidoc b/docs/reference/search/search-your-data/cohere-es.asciidoc new file mode 100644 index 0000000000000..f12f23ad2c5dc --- /dev/null +++ b/docs/reference/search/search-your-data/cohere-es.asciidoc @@ -0,0 +1,372 @@ +[[cohere-es]] +=== Tutorial: Using Cohere with {es} +++++ +Using Cohere with {es} +++++ + +The instructions in this tutorial shows you how to compute embeddings with +Cohere using the {infer} API and store them for efficient vector or hybrid +search in {es}. This tutorial will use the Python {es} client to perform the +operations. + +You'll learn how to: + +* create an {infer} endpoint for text embedding using the Cohere service, +* create the necessary index mapping for the {es} index, +* build an {infer} pipeline to ingest documents into the index together with the +embeddings, +* perform hybrid search on the data, +* rerank search results by using Cohere's rerank model, +* design a RAG system with Cohere's Chat API. 
+ +The tutorial uses the https://huggingface.co/datasets/mteb/scifact[SciFact] data +set. + +Refer to https://docs.cohere.com/docs/elasticsearch-and-cohere[Cohere's tutorial] +for an example using a different data set. + + +[discrete] +[[cohere-es-req]] +==== Requirements + +* A https://cohere.com/[Cohere account], +* an https://www.elastic.co/guide/en/cloud/current/ec-getting-started.html[Elastic Cloud] +account, +* Python 3.7 or higher. + + +[discrete] +[[cohere-es-packages]] +==== Install required packages + +Install {es} and Cohere: + +[source,py] +------------------------------------------------------------ +!pip install elasticsearch +!pip install cohere +------------------------------------------------------------ + +Import the required packages: + +[source,py] +------------------------------------------------------------ +from elasticsearch import Elasticsearch, helpers +import cohere +import json +import requests +------------------------------------------------------------ + +[discrete] +[[cohere-es-client]] +==== Create the {es} client + +To create your {es} client, you need: + +* https://www.elastic.co/search-labs/tutorials/install-elasticsearch/elastic-cloud#finding-your-cloud-id[your Cloud ID], +* https://www.elastic.co/search-labs/tutorials/install-elasticsearch/elastic-cloud#creating-an-api-key[an encoded API key]. + +[source,py] +------------------------------------------------------------ +ELASTICSEARCH_ENDPOINT = "elastic_endpoint" +ELASTIC_API_KEY = "elastic_api_key" + +client = Elasticsearch( + cloud_id=ELASTICSEARCH_ENDPOINT, + api_key=ELASTIC_API_KEY +) + +# Confirm the client has connected +print(client.info()) +------------------------------------------------------------ + + +[discrete] +[[cohere-es-infer-endpoint]] +==== Create the {infer} endpoint + +<> first. In this example, the +{infer} endpoint uses Cohere's `embed-english-v3.0` model and the +`embedding_type` is set to `byte`. 
+ +[source,py] +------------------------------------------------------------ +COHERE_API_KEY = "cohere_api_key" + +client.inference.put_model( + task_type="text_embedding", + inference_id="cohere_embeddings", + body={ + "service": "cohere", + "service_settings": { + "api_key": COHERE_API_KEY, + "model_id": "embed-english-v3.0", + "embedding_type": "byte" + } + }, +) +------------------------------------------------------------ + +You can find your API keys in your Cohere dashboard under the +https://dashboard.cohere.com/api-keys[API keys section]. + + +[discrete] +[[cohere-es-index-mapping]] +==== Create the index mapping + +Create the index mapping for the index that will contain the embeddings. + +[source,py] +------------------------------------------------------------ +client.indices.create( + index="cohere-embeddings", + settings={"index": {"default_pipeline": "cohere_embeddings"}}, + mappings={ + "properties": { + "text_embedding": { + "type": "dense_vector", + "dims": 1024, + "element_type": "byte", + }, + "text": {"type": "text"}, + "id": {"type": "integer"}, + "title": {"type": "text"} + } + }, +) +------------------------------------------------------------ + + +[discrete] +[[cohere-es-infer-pipeline]] +==== Create the {infer} pipeline + +Now you have an {infer} endpoint and an index ready to store embeddings. The +next step is to create an <> with an +<> that will create the embeddings using +the {infer} endpoint and stores them in the index. 
+ +[source,py] +-------------------------------------------------- +client.ingest.put_pipeline( + id="cohere_embeddings", + description="Ingest pipeline for Cohere inference.", + processors=[ + { + "inference": { + "model_id": "cohere_embeddings", + "input_output": { + "input_field": "text", + "output_field": "text_embedding", + }, + } + } + ], +) +-------------------------------------------------- + + +[discrete] +[[cohere-es-insert-documents]] +==== Prepare data and insert documents + +This example uses the https://huggingface.co/datasets/mteb/scifact[SciFact] data +set that you can find on HuggingFace. + +[source,py] +-------------------------------------------------- +url = 'https://huggingface.co/datasets/mteb/scifact/raw/main/corpus.jsonl' + +# Fetch the JSONL data from the URL +response = requests.get(url) +response.raise_for_status() # Ensure noticing bad responses + +# Split the content by new lines and parse each line as JSON +data = [json.loads(line) for line in response.text.strip().split('\n') if line] +# Now data is a list of dictionaries + +# Change `_id` key to `id` as `_id` is a reserved key in Elasticsearch. +for item in data: + if '_id' in item: + item['id'] = item.pop('_id') + +# Prepare the documents to be indexed +documents = [] +for line in data: + data_dict = line + documents.append({ + "_index": "cohere-embeddings", + "_source": data_dict, + } + ) + +# Use the bulk endpoint to index +helpers.bulk(client, documents) + +print("Data ingestion completed, text embeddings generated!") +-------------------------------------------------- + +Your index is populated with the SciFact data and text embeddings for the text +field. + + +[discrete] +[[cohere-es-hybrid-search]] +==== Hybrid search + +Let's start querying the index! + +The code below performs a hybrid search. 
The `kNN` query computes the relevance +of search results based on vector similarity using the `text_embedding` field, +the lexical search query uses BM25 retrieval to compute keyword similarity on +the `title` and `text` fields. + +[source,py] +-------------------------------------------------- +query = "What is biosimilarity?" + +response = client.search( + index="cohere-embeddings", + size=100, + knn={ + "field": "text_embedding", + "query_vector_builder": { + "text_embedding": { + "model_id": "cohere_embeddings", + "model_text": query, + } + }, + "k": 10, + "num_candidates": 50, + }, + query={ + "multi_match": { + "query": query, + "fields": ["text", "title"] + } + } +) + +raw_documents = response["hits"]["hits"] + +# Display the first 10 results +for document in raw_documents[0:10]: + print(f'Title: {document["_source"]["title"]}\nText: {document["_source"]["text"]}\n') + +# Format the documents for ranking +documents = [] +for hit in response["hits"]["hits"]: + documents.append(hit["_source"]["text"]) +-------------------------------------------------- + + +[discrete] +[[cohere-es-rerank-results]] +===== Rerank search results + +To combine the results more effectively, use +https://docs.cohere.com/docs/rerank-2[Cohere's Rerank v3] model through the +{infer} API to provide a more precise semantic reranking of the results. + +Create an {infer} endpoint with your Cohere API key and the used model name as +the `model_id` (`rerank-english-v3.0` in this example). + +[source,py] +-------------------------------------------------- +client.inference.put_model( + task_type="rerank", + inference_id="cohere_rerank", + body={ + "service": "cohere", + "service_settings":{ + "api_key": COHERE_API_KEY, + "model_id": "rerank-english-v3.0" + }, + "task_settings": { + "top_n": 10, + }, + } +) +-------------------------------------------------- + +Rerank the results using the new {infer} endpoint. 
+
+[source,py]
+--------------------------------------------------
+# Pass the query and the search results to the service
+response = client.inference.inference(
+    inference_id="cohere_rerank",
+    body={
+        "query": query,
+        "input": documents,
+        "task_settings": {
+            "return_documents": False
+        }
+    }
+)
+
+# Reconstruct the input documents based on the index provided in the rerank response
+ranked_documents = []
+for document in response.body["rerank"]:
+    ranked_documents.append({
+        "title": raw_documents[int(document["index"])]["_source"]["title"],
+        "text": raw_documents[int(document["index"])]["_source"]["text"]
+    })
+
+# Print the top 10 results
+for document in ranked_documents[0:10]:
+    print(f"Title: {document['title']}\nText: {document['text']}\n")
+--------------------------------------------------
+
+The response is a list of documents in descending order of relevance. Each
+document has a corresponding index that reflects the order of the documents when
+they were sent to the {infer} endpoint.
+
+
+[discrete]
+[[cohere-es-rag]]
+==== Retrieval Augmented Generation (RAG) with Cohere and {es}
+
+RAG is a method for generating text using additional information fetched from an
+external data source. With the ranked results, you can build a RAG system on the
+top of what you previously created by using
+https://docs.cohere.com/docs/chat-api[Cohere's Chat API].
+
+Pass in the retrieved documents and the query to receive a grounded response
+using Cohere's newest generative model
+https://docs.cohere.com/docs/command-r-plus[Command R+].
+
+Then pass in the query and the documents to the Chat API, and print out the
+response.
+
+[source,py]
+--------------------------------------------------
+response = co.chat(message=query, documents=ranked_documents, model='command-r-plus')
+
+source_documents = []
+for citation in response.citations:
+    for document_id in citation.document_ids:
+        if document_id not in source_documents:
+            source_documents.append(document_id)
+
+print(f"Query: {query}")
+print(f"Response: {response.text}")
+print("Sources:")
+for document in response.documents:
+    if document['id'] in source_documents:
+        print(f"{document['title']}: {document['text']}")
+
+--------------------------------------------------
+
+The response will look similar to this:
+
+[source,console-result]
+--------------------------------------------------
+Query: What is biosimilarity?
+Response: Biosimilarity is based on the comparability concept, which has been used successfully for several decades to ensure close similarity of a biological product before and after a manufacturing change. Over the last 10 years, experience with biosimilars has shown that even complex biotechnology-derived proteins can be copied successfully.
+Sources:
+Interchangeability of Biosimilars: A European Perspective: (...)
+--------------------------------------------------
+// NOTCONSOLE
diff --git a/docs/reference/search/search-your-data/retrievers-overview.asciidoc b/docs/reference/search/search-your-data/retrievers-overview.asciidoc
new file mode 100644
index 0000000000000..fdd984819558b
--- /dev/null
+++ b/docs/reference/search/search-your-data/retrievers-overview.asciidoc
@@ -0,0 +1,207 @@
+[[retrievers-overview]]
+== Retrievers
+
+// Will move to a top level "Retrievers and reranking" section once reranking is live
+
+preview::[]
+
+A retriever is an abstraction that was added to the Search API in *8.14.0*.
+This abstraction enables the configuration of multi-stage retrieval
+pipelines within a single `_search` call.
This simplifies your search +application logic, because you no longer need to configure complex searches via +multiple {es} calls or implement additional client-side logic to +combine results from different queries. + +This document provides a general overview of the retriever abstraction. +For implementation details, including notable restrictions, check out the +<> in the `_search` API docs. + +[discrete] +[[retrievers-overview-types]] +=== Retriever types + +Retrievers come in various types, each tailored for different search operations. +The following retrievers are currently available: + +* <>. Returns top documents from a +traditional https://www.elastic.co/guide/en/elasticsearch/reference/master/query-dsl.html[query]. +Mimics a traditional query but in the context of a retriever framework. This +ensures backward compatibility as existing `_search` requests remain supported. +That way you can transition to the new abstraction at your own pace without +mixing syntaxes. +* <>. Returns top documents from a <>, +in the context of a retriever framework. +* <>. Combines and ranks multiple first-stage retrievers using +the reciprocal rank fusion (RRF) algorithm. Allows you to combine multiple result sets +with different relevance indicators into a single result set. +An RRF retriever is a *compound retriever*, where its `filter` element is +propagated to its sub retrievers. ++ +Sub retrievers may not use elements that +are restricted by having a compound retriever as part of the retriever tree. +See the <> for detailed +examples and information on how to use the RRF retriever. + +[NOTE] +==== +Stay tuned for more retriever types in future releases! +==== + +[discrete] +=== What makes retrievers useful? + +Here's an overview of what makes retrievers useful and how they differ from +regular queries. + +. *Simplified user experience*. Retrievers simplify the user experience by +allowing entire retrieval pipelines to be configured in a single API call. 
This +maintains backward compatibility with traditional query elements by +automatically translating them to the appropriate retriever. +. *Structured retrieval*. Retrievers provide a more structured way to define search +operations. They allow searches to be described using a "retriever tree", a +hierarchical structure that clarifies the sequence and logic of operations, +making complex searches more understandable and manageable. +. *Composability and flexibility*. Retrievers enable flexible composability, +allowing you to build pipelines and seamlessly integrate different retrieval +strategies into these pipelines. Retrievers make it easy to test out different +retrieval strategy combinations. +. *Compound operations*. A retriever can have sub retrievers. This +allows complex nested searches where the results of one retriever feed into +another, supporting sophisticated querying strategies that might involve +multiple stages or criteria. +. *Retrieval as a first-class concept*. Unlike +traditional queries, where the query is a part of a larger search API call, +retrievers are designed as standalone entities that can be combined or used in +isolation. This enables a more modular and flexible approach to constructing +searches. +. *Enhanced control over document scoring and ranking*. Retrievers +allow for more explicit control over how documents are scored and filtered. For +instance, you can specify minimum score thresholds, apply complex filters +without affecting scoring, and use parameters like `terminate_after` for +performance optimizations. +. *Integration with existing {es} functionalities*. Even though +retrievers can be used instead of existing `_search` API syntax (like the +`query` and `knn`), they are designed to integrate seamlessly with things like +pagination (`search_after`) and sorting. They also maintain compatibility with +aggregation operations by treating the combination of all leaf retrievers as +`should` clauses in a boolean query. +. 
*Cleaner separation of concerns*. When using compound retrievers, only the +query element is allowed, which enforces a cleaner separation of concerns +and prevents the complexity that might arise from overly nested or +interdependent configurations. + +[discrete] +[[retrievers-overview-example]] +=== Example + +The following example demonstrates how using retrievers +simplify the composability of queries for RRF ranking. + +[source,js] +---- +GET example-index/_search +{ + "retriever": { + "rrf": { + "retrievers": [ + { + "standard": { + "query": { + "text_expansion": { + "vector.tokens": { + "model_id": ".elser_model_2", + "model_text": "What blue shoes are on sale?" + } + } + } + } + }, + { + "standard": { + "query": { + "match": { + "text": "blue shoes sale" + } + } + } + } + ] + } + } +} +---- +//NOTCONSOLE + +This example demonstrates how you can combine different +retrieval strategies into a single `retriever` pipeline. + +Compare to `RRF` with `sub_searches` approach: + +.*Expand* for example +[%collapsible] +============== + +[source,js] +---- +GET example-index/_search +{ + "sub_searches":[ + { + "query":{ + "match":{ + "text":"blue shoes sale" + } + } + }, + { + "query":{ + "text_expansion":{ + "vector.tokens":{ + "model_id":".elser_model_2", + "model_text":"What blue shoes are on sale?" + } + } + } + } + ], + "rank":{ + "rrf":{ + "window_size":50, + "rank_constant":20 + } + } +} +---- +//NOTCONSOLE +============== + +[discrete] +[[retrievers-overview-glossary]] +=== Glossary + +Here are some important terms: + +* *Retrieval Pipeline*. Defines the entire retrieval and ranking logic to +produce top hits. +* *Retriever Tree*. A hierarchical structure that defines how retrievers interact. +* *First-stage Retriever*. Returns an initial set of candidate documents. +* *Compound Retriever*. Builds on one or more retrievers, +enhancing document retrieval and ranking logic. +* *Combiners*. Compound retrievers that merge top hits +from multiple sub-retrievers. 
+//* NOT YET *Rerankers*. Special compound retrievers that reorder hits and may adjust the number of hits, with distinctions between first-stage and second-stage rerankers. + +[discrete] +[[retrievers-overview-play-in-search]] +=== Retrievers in action + +The Search Playground builds Elasticsearch queries using the retriever abstraction. +It automatically detects the fields and types in your index and builds a retriever tree based on your selections. + +You can use the Playground to experiment with different retriever configurations and see how they affect search results. + +Refer to the {kibana-ref}/playground.html[Playground documentation] for more information. +// Content coming in https://github.com/elastic/kibana/pull/182692 + + + diff --git a/docs/reference/search/search-your-data/search-your-data.asciidoc b/docs/reference/search/search-your-data/search-your-data.asciidoc index bed204985296c..e1c1618410f2f 100644 --- a/docs/reference/search/search-your-data/search-your-data.asciidoc +++ b/docs/reference/search/search-your-data/search-your-data.asciidoc @@ -43,10 +43,11 @@ DSL, with a simplified user experience. Create search applications based on your results directly in the Kibana Search UI. 
include::search-api.asciidoc[] -include::search-application-overview.asciidoc[] include::knn-search.asciidoc[] include::semantic-search.asciidoc[] +include::retrievers-overview.asciidoc[] include::learning-to-rank.asciidoc[] include::search-across-clusters.asciidoc[] include::search-with-synonyms.asciidoc[] +include::search-application-overview.asciidoc[] include::behavioral-analytics/behavioral-analytics-overview.asciidoc[] diff --git a/docs/reference/search/search-your-data/semantic-search.asciidoc b/docs/reference/search/search-your-data/semantic-search.asciidoc index a4d892c98645b..a1197e7bbbd3a 100644 --- a/docs/reference/search/search-your-data/semantic-search.asciidoc +++ b/docs/reference/search/search-your-data/semantic-search.asciidoc @@ -136,3 +136,4 @@ include::{es-ref-dir}/tab-widgets/semantic-search/hybrid-search-widget.asciidoc[ include::semantic-search-elser.asciidoc[] include::semantic-search-inference.asciidoc[] +include::cohere-es.asciidoc[] diff --git a/docs/reference/searchable-snapshots/index.asciidoc b/docs/reference/searchable-snapshots/index.asciidoc index 4a56961246c2b..794496c8b24ad 100644 --- a/docs/reference/searchable-snapshots/index.asciidoc +++ b/docs/reference/searchable-snapshots/index.asciidoc @@ -310,9 +310,9 @@ of {search-snap} indices. The sole copy of the data in a {search-snap} index is the underlying snapshot, stored in the repository. For example: -* You cannot unregister a repository while any of the searchable snapshots it -contains are mounted in {es}. You also cannot delete a snapshot if any of its -indices are mounted as a searchable snapshot in the same cluster. +* You must not unregister a repository while any of the searchable snapshots it +contains are mounted in {es}. You also must not delete a snapshot if any of its +indices are mounted as searchable snapshots. 
* If you mount indices from snapshots held in a repository to which a different cluster has write access then you must make sure that the other cluster does not diff --git a/docs/reference/settings/inference-settings.asciidoc b/docs/reference/settings/inference-settings.asciidoc new file mode 100644 index 0000000000000..fa0905cf0ef73 --- /dev/null +++ b/docs/reference/settings/inference-settings.asciidoc @@ -0,0 +1,92 @@ + +[role="xpack"] +[[inference-settings]] +=== Inference API settings in {es} +++++ +Inference settings +++++ + +[[inference-settings-description]] +// tag::inference-settings-description-tag[] +You do not need to configure any settings to use the {infer} APIs. Each setting has a default. +// end::inference-settings-description-tag[] + +[discrete] +[[xpack-inference-logging]] +// tag::inference-logging[] +==== Inference API logging settings + +When certain failures occur, a log message is emitted. In the case of a +reoccurring failure the logging throttler restricts repeated messages from being logged. + +`xpack.inference.logging.reset_interval`:: +(<>) Specifies the interval for when a cleanup thread will clear an internal +cache of the previously logged messages. Defaults to one day (`1d`). + +`xpack.inference.logging.wait_duration`:: +(<>) Specifies the amount of time to wait after logging a message before that +message can be logged again. Defaults to one hour (`1h`). +// end::inference-logging[] + +[[xpack-inference-http-settings]] +// tag::inference-http-settings[] +==== {infer-cap} API HTTP settings + +`xpack.inference.http.max_response_size`:: +(<>) Specifies the maximum size in bytes an HTTP response is allowed to have, +defaults to `10mb`, the maximum configurable value is `50mb`. + +`xpack.inference.http.max_total_connections`:: +(<>) Specifies the maximum number of connections the internal connection pool can +lease. Defaults to `50`. 
+ +`xpack.inference.http.max_route_connections`:: +(<>) Specifies the maximum number of connections a single route can lease from +the internal connection pool. If this setting is set to a value equal to or greater than +`xpack.inference.http.max_total_connections`, then a single third party service could lease all available +connections and other third party services would be unable to lease connections. Defaults to `20`. + +`xpack.inference.http.connection_eviction_interval`:: +(<>) Specifies the interval that an eviction thread will run to remove expired and +stale connections from the internal connection pool. Decreasing this time value can help improve throughput if +multiple third party service are contending for the available connections in the pool. Defaults to one minute (`1m`). + +`xpack.inference.http.connection_eviction_max_idle_time`:: +(<>) Specifies the maximum duration a connection can be unused before it is marked as +idle and can be closed and removed from the shared connection pool. Defaults to one minute (`1m`). + +`xpack.inference.http.request_executor.queue_capacity`:: +(<>) Specifies the size of the internal queue for requests waiting to be sent. If +the queue is full and a request is sent to the {infer} API, it will be rejected. Defaults to `2000`. + +[[xpack-inference-http-retry-settings]] +==== {infer-cap} API HTTP Retry settings + +When a third-party service returns a transient failure code (for example, 429), the request is retried by the {infer} +API. These settings govern the retry behavior. When a request is retried, exponential backoff is used. + +`xpack.inference.http.retry.initial_delay`:: +(<>) Specifies the initial delay before retrying a request. Defaults to one second +(`1s`). + +`xpack.inference.http.retry.max_delay_bound`:: +(<>) Specifies the maximum delay for a request. Defaults to five seconds (`5s`). + +`xpack.inference.http.retry.timeout`:: +(<>) Specifies the maximum amount of time a request can be retried. 
+Once the request exceeds this time, the request will no longer be retried and a failure will be returned. +Defaults to 30 seconds (`30s`). +// end::inference-logging[] + +[[xpack-inference-input-text]] +// tag::inference-input-text[] +==== {infer-cap} API Input text + +For certain third-party service integrations, when the service returns an error indicating that the request +input was too large, the input will be truncated and the request is retried. These settings govern +how the truncation is performed. + +`xpack.inference.truncator.reduction_percentage`:: +(<>) Specifies the percentage to reduce the input text by if the 3rd party service +responds with an error indicating it is too long. Defaults to 50 percent (`0.5`). +// end::inference-input-text[] diff --git a/docs/reference/setup.asciidoc b/docs/reference/setup.asciidoc index e007b67a943b0..64626aafb2441 100644 --- a/docs/reference/setup.asciidoc +++ b/docs/reference/setup.asciidoc @@ -29,8 +29,6 @@ resource-heavy {ls} deployment should be on its own host. include::setup/install.asciidoc[] -include::setup/run-elasticsearch-locally.asciidoc[] - include::setup/configuration.asciidoc[] include::setup/important-settings.asciidoc[] @@ -70,6 +68,8 @@ include::setup/logging-config.asciidoc[] include::settings/ml-settings.asciidoc[] +include::settings/inference-settings.asciidoc[] + include::settings/monitoring-settings.asciidoc[] include::modules/node.asciidoc[] diff --git a/docs/reference/setup/install.asciidoc b/docs/reference/setup/install.asciidoc index 49501c46b8ba9..89373d0ce8d44 100644 --- a/docs/reference/setup/install.asciidoc +++ b/docs/reference/setup/install.asciidoc @@ -20,7 +20,7 @@ If you want to install and manage {es} yourself, you can: * Run {es} in a <>. * Set up and manage {es}, {kib}, {agent}, and the rest of the Elastic Stack on Kubernetes with {eck-ref}[{eck}]. -TIP: To try out Elasticsearch on your own machine, we recommend using Docker and running both Elasticsearch and Kibana. 
For more information, see <>. +TIP: To try out Elasticsearch on your own machine, we recommend using Docker and running both Elasticsearch and Kibana. For more information, see <>. Please note that this setup is *not suitable for production use*. [discrete] [[elasticsearch-install-packages]] diff --git a/docs/reference/setup/install/docker.asciidoc b/docs/reference/setup/install/docker.asciidoc index 0c518d520bdd5..370fc5c4ccf7e 100644 --- a/docs/reference/setup/install/docker.asciidoc +++ b/docs/reference/setup/install/docker.asciidoc @@ -8,6 +8,12 @@ https://github.com/elastic/elasticsearch/blob/{branch}/distribution/docker[GitHu include::license.asciidoc[] +[TIP] +==== +If you just want to test {es} in local development, refer to <>. +Please note that this setup is not suitable for production environments. +==== + [[docker-cli-run-dev-mode]] ==== Run {es} in Docker diff --git a/docs/reference/setup/run-elasticsearch-locally.asciidoc b/docs/reference/setup/run-elasticsearch-locally.asciidoc deleted file mode 100644 index a6e6d5c8963a2..0000000000000 --- a/docs/reference/setup/run-elasticsearch-locally.asciidoc +++ /dev/null @@ -1,183 +0,0 @@ -[[run-elasticsearch-locally]] -== Run Elasticsearch locally - -//// -IMPORTANT: This content is replicated in the Elasticsearch repo -README.ascidoc file. If you make changes, you must also update the -Elasticsearch README. -+ -GitHub renders the tagged region directives when you view the README, -so it's not possible to just include the content from the README. Darn. -+ -Also note that there are similar instructions in the Kibana guide: -https://www.elastic.co/guide/en/kibana/current/docker.html -//// - -To try out Elasticsearch on your own machine, we recommend using Docker -and running both Elasticsearch and Kibana. -Docker images are available from the https://www.docker.elastic.co[Elastic Docker registry]. - -NOTE: Starting in Elasticsearch 8.0, security is enabled by default. 
-The first time you start Elasticsearch, TLS encryption is configured automatically, -a password is generated for the `elastic` user, -and a Kibana enrollment token is created so you can connect Kibana to your secured cluster. - -For other installation options, see the -https://www.elastic.co/guide/en/elasticsearch/reference/current/install-elasticsearch.html[Elasticsearch installation documentation]. - -[discrete] -=== Start Elasticsearch - -. Install and start https://www.docker.com/products/docker-desktop[Docker -Desktop]. Go to **Preferences > Resources > Advanced** and set Memory to at least 4GB. - -. Start an Elasticsearch container: -ifeval::["{release-state}"=="unreleased"] -+ -WARNING: Version {version} of {es} has not yet been released, so no -Docker image is currently available for this version. -endif::[] -+ -[source,sh,subs="attributes"] ----- -docker network create elastic -docker pull docker.elastic.co/elasticsearch/elasticsearch:{version} -docker run --name elasticsearch --net elastic -p 9200:9200 -p 9300:9300 -e "discovery.type=single-node" -t docker.elastic.co/elasticsearch/elasticsearch:{version} ----- -+ -When you start Elasticsearch for the first time, the generated `elastic` user password and -Kibana enrollment token are output to the terminal. -+ -NOTE: You might need to scroll back a bit in the terminal to view the password -and enrollment token. - -. Copy the generated password and enrollment token and save them in a secure -location. These values are shown only when you start Elasticsearch for the first time. -You'll use these to enroll Kibana with your Elasticsearch cluster and log in. - -[discrete] -=== Start Kibana - -Kibana enables you to easily send requests to Elasticsearch and analyze, visualize, and manage data interactively. - -. 
In a new terminal session, start Kibana and connect it to your Elasticsearch container: -ifeval::["{release-state}"=="unreleased"] -+ -WARNING: Version {version} of {kib} has not yet been released, so no -Docker image is currently available for this version. -endif::[] -+ -[source,sh,subs="attributes"] ----- -docker pull docker.elastic.co/kibana/kibana:{version} -docker run --name kibana --net elastic -p 5601:5601 docker.elastic.co/kibana/kibana:{version} ----- -+ -When you start Kibana, a unique URL is output to your terminal. - -. To access Kibana, open the generated URL in your browser. - - .. Paste the enrollment token that you copied when starting - Elasticsearch and click the button to connect your Kibana instance with Elasticsearch. - - .. Log in to Kibana as the `elastic` user with the password that was generated - when you started Elasticsearch. - -[discrete] -=== Send requests to Elasticsearch - -You send data and other requests to Elasticsearch through REST APIs. -You can interact with Elasticsearch using any client that sends HTTP requests, -such as the https://www.elastic.co/guide/en/elasticsearch/client/index.html[Elasticsearch -language clients] and https://curl.se[curl]. -Kibana's developer console provides an easy way to experiment and test requests. -To access the console, go to **Management > Dev Tools**. - -[discrete] -=== Add data - -You index data into Elasticsearch by sending JSON objects (documents) through the REST APIs. -Whether you have structured or unstructured text, numerical data, or geospatial data, -Elasticsearch efficiently stores and indexes it in a way that supports fast searches. - -For timestamped data such as logs and metrics, you typically add documents to a -data stream made up of multiple auto-generated backing indices. - -To add a single document to an index, submit an HTTP post request that targets the index. 
- -[source,console] ----- -POST /customer/_doc/1 -{ - "firstname": "Jennifer", - "lastname": "Walters" -} ----- - -This request automatically creates the `customer` index if it doesn't exist, -adds a new document that has an ID of 1, and -stores and indexes the `firstname` and `lastname` fields. - -The new document is available immediately from any node in the cluster. -You can retrieve it with a GET request that specifies its document ID: - -[source,console] ----- -GET /customer/_doc/1 ----- -// TEST[continued] - -To add multiple documents in one request, use the `_bulk` API. -Bulk data must be newline-delimited JSON (NDJSON). -Each line must end in a newline character (`\n`), including the last line. - -[source,console] ----- -PUT customer/_bulk -{ "create": { } } -{ "firstname": "Monica","lastname":"Rambeau"} -{ "create": { } } -{ "firstname": "Carol","lastname":"Danvers"} -{ "create": { } } -{ "firstname": "Wanda","lastname":"Maximoff"} -{ "create": { } } -{ "firstname": "Jennifer","lastname":"Takeda"} ----- -// TEST[continued] - -[discrete] -=== Search - -Indexed documents are available for search in near real-time. -The following search matches all customers with a first name of _Jennifer_ -in the `customer` index. - -[source,console] ----- -GET customer/_search -{ - "query" : { - "match" : { "firstname": "Jennifer" } - } -} ----- -// TEST[continued] - -[discrete] -=== Explore - -You can use Discover in Kibana to interactively search and filter your data. -From there, you can start creating visualizations and building and sharing dashboards. - -To get started, create a _data view_ that connects to one or more Elasticsearch indices, -data streams, or index aliases. - -. Go to **Management > Stack Management > Kibana > Data Views**. -. Select **Create data view**. -. Enter a name for the data view and a pattern that matches one or more indices, -such as _customer_. -. Select **Save data view to Kibana**. - -To start exploring, go to **Analytics > Discover**. 
- - diff --git a/docs/reference/tab-widgets/api-call-widget.asciidoc b/docs/reference/tab-widgets/api-call-widget.asciidoc index adc2aa86f1c0e..4ad3c45366434 100644 --- a/docs/reference/tab-widgets/api-call-widget.asciidoc +++ b/docs/reference/tab-widgets/api-call-widget.asciidoc @@ -12,7 +12,7 @@ aria-controls="self-managed-tab-api-call" id="self-managed-api-call" tabindex="-1"> - Self-managed + Local Dev (Docker)

- Elasticsearch Service + Elastic Cloud
> for advanced Docker documentation. - -. Run the following Docker commands: -+ -[source,sh,subs="attributes"] ----- -docker network create elastic -docker pull {docker-image} -docker run --name es01 --net elastic -p 9200:9200 -p 9300:9300 -e "discovery.type=single-node" -t {docker-image} ----- - -. Copy the generated `elastic` password and enrollment token, which are output to your terminal. -You'll use these to enroll {kib} with your {es} cluster and log in. -These credentials are only shown when you start {es} for the first time. -+ -We recommend storing the `elastic` password as an environment variable in your shell. Example: -+ -[source,sh] ----- -export ELASTIC_PASSWORD="your_password" ----- -+ -. Copy the `http_ca.crt` SSL certificate from the container to your local machine. -+ -[source,sh] ----- -docker cp es01:/usr/share/elasticsearch/config/certs/http_ca.crt . ----- -+ -. Make a REST API call to {es} to ensure the {es} container is running. -+ -[source,sh] ----- -curl --cacert http_ca.crt -u elastic:$ELASTIC_PASSWORD https://localhost:9200 ----- -// NOTCONSOLE - -*Run {kib}* - -{kib} is the user interface for Elastic. -It's great for getting started with {es} and exploring your data. -We'll be using the Dev Tools *Console* in {kib} to make REST API calls to {es}. - -In a new terminal session, start {kib} and connect it to your {es} container: - -[source,sh,subs="attributes"] ----- -docker pull {kib-docker-image} -docker run --name kibana --net elastic -p 5601:5601 {kib-docker-image} ----- - -When you start {kib}, a unique URL is output to your terminal. -To access {kib}: - -. Open the generated URL in your browser. -. Paste the enrollment token that you copied earlier, to connect your {kib} instance with {es}. -. Log in to {kib} as the `elastic` user with the password that was generated when you started {es}. +Refer to our <> to quickly spin up a local development environment in Docker. 
If you don't need {kib}, you'll only need one `docker run` command to start {es}. Please note that this setup is *not suitable for production use*. // end::self-managed[] \ No newline at end of file diff --git a/docs/reference/tab-widgets/semantic-search/hybrid-search.asciidoc b/docs/reference/tab-widgets/semantic-search/hybrid-search.asciidoc index 47403df450bd2..93edc0918614d 100644 --- a/docs/reference/tab-widgets/semantic-search/hybrid-search.asciidoc +++ b/docs/reference/tab-widgets/semantic-search/hybrid-search.asciidoc @@ -1,7 +1,7 @@ // tag::elser[] Hybrid search between a semantic and lexical query can be achieved by using an -< as part of your search request. Provide a +<> as part of your search request. Provide a `text_expansion` query and a full-text query as <> for the `rrf` retriever. The `rrf` retriever uses <> to rank the top documents. diff --git a/docs/reference/transform/images/transform-rule.png b/docs/reference/transform/images/transform-rule.png deleted file mode 100644 index c43dd6c1be929..0000000000000 Binary files a/docs/reference/transform/images/transform-rule.png and /dev/null differ diff --git a/docs/reference/transform/transform-alerts.asciidoc b/docs/reference/transform/transform-alerts.asciidoc index e3ea82d34ec2e..988dc5effe956 100644 --- a/docs/reference/transform/transform-alerts.asciidoc +++ b/docs/reference/transform/transform-alerts.asciidoc @@ -18,19 +18,20 @@ refer to You can create {transform} rules under **{stack-manage-app} > {rules-ui}**. -. On the *Create rule* window, give a name to the rule and optionally provide -tags. Select the {transform} health rule type: +. Click *Create rule* and select the {transform} health rule type. + +. Give a name to the rule and optionally provide tags. + +. Select the {transform} or {transforms} to include. You can also use a special +character (`*`) to apply the rule to all your {transforms}. {transforms-cap} +created after the rule are automatically included. 
+ -- [role="screenshot"] -image::images/transform-rule.png["Creating a transform health rule",500] +image::images/transform-check-config.png["Selecting health check",500] // NOTE: This is screenshot is automatically generated. Do not edit it directly. -- -. Select the {transform} or {transforms} to include. You can also use a special -character (`*`) to apply the rule to all your {transforms}. {transforms-cap} -created after the rule are automatically included. - . The following health checks are available and enabled by default: + -- @@ -41,10 +42,6 @@ _{transform-cap} is not started_:: _Unhealthy {transform}_:: Get alerts when a {transform} has an unhealthy status. The notification message contains status details and related issues. - -[role="screenshot"] -image::images/transform-check-config.png["Selecting health check",500] -// NOTE: This is screenshot is automatically generated. Do not edit it directly. -- . Set the check interval, which defines how often to evaluate the rule conditions. 
diff --git a/docs/reference/troubleshooting.asciidoc b/docs/reference/troubleshooting.asciidoc index 01ef39b69c529..ceff8619062c4 100644 --- a/docs/reference/troubleshooting.asciidoc +++ b/docs/reference/troubleshooting.asciidoc @@ -138,3 +138,5 @@ include::troubleshooting/troubleshooting-searches.asciidoc[] include::troubleshooting/troubleshooting-shards-capacity.asciidoc[] include::troubleshooting/troubleshooting-unbalanced-cluster.asciidoc[] + +include::troubleshooting/diagnostic.asciidoc[] diff --git a/docs/reference/troubleshooting/diagnostic.asciidoc b/docs/reference/troubleshooting/diagnostic.asciidoc new file mode 100644 index 0000000000000..a944ca88d285d --- /dev/null +++ b/docs/reference/troubleshooting/diagnostic.asciidoc @@ -0,0 +1,152 @@ +[[diagnostic]] +== Capturing diagnostics +++++ +Capture diagnostics +++++ +:keywords: Elasticsearch diagnostic, diagnostics + +The {es} https://github.com/elastic/support-diagnostics[Support Diagnostic] tool captures a point-in-time snapshot of cluster statistics and most settings. +It works against all {es} versions. + +This information can be used to troubleshoot problems with your cluster. For examples of issues that you can troubleshoot using Support Diagnostic tool output, refer to https://www.elastic.co/blog/why-does-elastic-support-keep-asking-for-diagnostic-files[the Elastic blog]. + +You can generate diagnostic information using this tool before you contact https://support.elastic.co[Elastic Support] or +https://discuss.elastic.co[Elastic Discuss] to minimize turnaround time. + +[discrete] +[[diagnostic-tool-requirements]] +=== Requirements + +- Java Runtime Environment or Java Development Kit v1.8 or higher + +[discrete] +[[diagnostic-tool-access]] +=== Access the tool + +The Support Diagnostic tool is included as a sub-library in some Elastic deployments: + +* {ece}: Located under **{ece}** > **Deployment** > **Operations** > +**Prepare Bundle** > **{es}**. 
+* {eck}: Run as https://www.elastic.co/guide/en/cloud-on-k8s/current/k8s-take-eck-dump.html[`eck-diagnostics`]. + +You can also directly download the `diagnostics-X.X.X-dist.zip` file for the latest Support Diagnostic release +from https://github.com/elastic/support-diagnostics/releases/latest[the `support-diagnostic` repo]. + + +[discrete] +[[diagnostic-capture]] +=== Capture diagnostic information + +To capture an {es} diagnostic: + +. In a terminal, verify that your network and user permissions are sufficient to connect to your {es} +cluster by polling the cluster's <>. ++ +For example, with the parameters `host:localhost`, `port:9200`, and `username:elastic`, you'd use the following curl request: ++ +[source,sh] +---- +curl -X GET -k -u elastic -p https://localhost:9200/_cluster/health +---- +// NOTCONSOLE ++ +If you receive an HTTP 200 `OK` response, then you can proceed to the next step. If you receive a different +response code, then <> before proceeding. + +. Using the same environment parameters, run the diagnostic tool script. ++ +For information about the parameters that you can pass to the tool, refer to the https://github.com/elastic/support-diagnostics#standard-options[diagnostic +parameter reference]. ++ +The following command options are recommended: ++ +**Unix-based systems** ++ +[source,sh] +---- +sudo ./diagnostics.sh --type local --host localhost --port 9200 -u elastic -p --bypassDiagVerify --ssl --noVerify +---- ++ +**Windows** ++ +[source,sh] +---- +sudo .\diagnostics.bat --type local --host localhost --port 9200 -u elastic -p --bypassDiagVerify --ssl --noVerify +---- ++ +[TIP] +.Script execution modes +==== +You can execute the script in three https://github.com/elastic/support-diagnostics#diagnostic-types[modes]: + +* `local` (default, recommended): Polls the <>, +gathers operating system info, and captures cluster and GC logs. 
+ +* `remote`: Establishes an ssh session +to the applicable target server to pull the same information as `local`. + +* `api`: Polls the <>. All other data must be +collected manually. +==== + +. When the script has completed, verify that no errors were logged to `diagnostic.log`. +If the log file contains errors, then refer to <>. + +. If the script completed without errors, then an archive with the format `-diagnostics-.zip` is created in the working directory, or an output directory you have specified. You can review or share the diagnostic archive as needed. + +[discrete] +[[diagnostic-non-200]] +=== Diagnose a non-200 cluster health response + +When you poll your cluster health, if you receive any response other than `200 OK`, then the diagnostic tool +might not work as intended. The following are possible error codes and their resolutions: + +HTTP 401 `UNAUTHENTICATED`:: +Additional information in the error will usually indicate either +that your `username:password` pair is invalid, or that your `.security` +index is unavailable and you need to set up a temporary +<> user with `role:superuser` to authenticate. + +HTTP 403 `UNAUTHORIZED`:: +Your `username` is recognized but +has insufficient permissions to run the diagnostic. Either use a different +username or elevate the user's privileges. + +HTTP 429 `TOO_MANY_REQUESTS` (for example, `circuit_breaking_exception`):: +Your username authenticated and authorized, but the cluster is under +sufficiently high strain that it's not responding to API calls. These +responses are usually intermittent. You can proceed with running the diagnostic, +but the diagnostic results might be incomplete. + +HTTP 504 `BAD_GATEWAY`:: +Your network is experiencing issues reaching the cluster. You might be using a proxy or firewall. +Consider running the diagnostic tool from a different location, confirming your port, or using an IP +instead of a URL domain. 
+ +HTTP 503 `SERVICE_UNAVAILABLE` (for example, `master_not_discovered_exception`):: +Your cluster does not currently have an elected master node, which is +required for it to be API-responsive. This might be temporary while the master +node rotates. If the issue persists, then <> +before proceeding. + +[discrete] +[[diagnostic-log-errors]] +=== Diagnose errors in `diagnostic.log` + +The following are common errors that you might encounter when running the diagnostic tool: + +* `Error: Could not find or load main class com.elastic.support.diagnostics.DiagnosticApp` ++ +This indicates that you accidentally downloaded the source code file +instead of `diagnostics-X.X.X-dist.zip` from the releases page. + +* `Could not retrieve the Elasticsearch version due to a system or network error - unable to continue.` ++ +This indicates that the diagnostic couldn't run commands against the cluster. +Poll the cluster's health again, and ensure that you're using the same parameters +when you run the diagnostic batch or shell file. + +* A `security_exception` that includes `is unauthorized for user`: ++ +The provided user has insufficient admin permissions to run the diagnostic tool. Use another +user, or grant the user `role:superuser` privileges. \ No newline at end of file diff --git a/libs/cli/src/main/java/org/elasticsearch/cli/Command.java b/libs/cli/src/main/java/org/elasticsearch/cli/Command.java index 201f0810f4d9b..32c4446e71dd2 100644 --- a/libs/cli/src/main/java/org/elasticsearch/cli/Command.java +++ b/libs/cli/src/main/java/org/elasticsearch/cli/Command.java @@ -17,6 +17,7 @@ import java.io.Closeable; import java.io.IOException; +import java.io.StringWriter; import java.util.Arrays; /** @@ -45,7 +46,7 @@ public Command(final String description) { } /** Parses options for this command from args and executes it. 
*/ - public final int main(String[] args, Terminal terminal, ProcessInfo processInfo) throws Exception { + public final int main(String[] args, Terminal terminal, ProcessInfo processInfo) throws IOException { try { mainWithoutErrorHandling(args, terminal, processInfo); } catch (OptionException e) { @@ -59,6 +60,14 @@ public final int main(String[] args, Terminal terminal, ProcessInfo processInfo) } printUserException(terminal, e); return e.exitCode; + } catch (IOException ioe) { + terminal.errorPrintln(ioe); + return ExitCodes.IO_ERROR; + } catch (Throwable t) { + // It's acceptable to catch Throwable at this point: + // We're about to exit and only want to print the stacktrace with appropriate formatting (e.g. JSON). + terminal.errorPrintln(t); + return ExitCodes.CODE_ERROR; } return ExitCodes.OK; } @@ -96,15 +105,17 @@ public OptionSet parseOptions(String[] args) { /** Prints a help message for the command to the terminal. */ private void printHelp(Terminal terminal, boolean toStdError) throws IOException { + StringWriter writer = new StringWriter(); + parser.printHelpOn(writer); if (toStdError) { terminal.errorPrintln(description); terminal.errorPrintln(""); - parser.printHelpOn(terminal.getErrorWriter()); + terminal.errorPrintln(writer.toString()); } else { terminal.println(description); terminal.println(""); printAdditionalHelp(terminal); - parser.printHelpOn(terminal.getWriter()); + terminal.println(writer.toString()); } } diff --git a/libs/cli/src/main/java/org/elasticsearch/cli/Terminal.java b/libs/cli/src/main/java/org/elasticsearch/cli/Terminal.java index 69cb76636a996..aaf233438f263 100644 --- a/libs/cli/src/main/java/org/elasticsearch/cli/Terminal.java +++ b/libs/cli/src/main/java/org/elasticsearch/cli/Terminal.java @@ -72,6 +72,13 @@ protected Terminal(Reader reader, PrintWriter outWriter, PrintWriter errWriter) this.errWriter = errWriter; } + /** + * Constructs a terminal instance from a delegate instance. 
+ */ + protected Terminal(Terminal delegate) { + this(delegate.reader, delegate.outWriter, delegate.errWriter); + } + /** * Sets the verbosity of the terminal. * @@ -113,14 +120,12 @@ public final Reader getReader() { return reader; } - /** Returns a Writer which can be used to write to the terminal directly using standard output. */ - public final PrintWriter getWriter() { - return outWriter; - } - - /** Returns a Writer which can be used to write to the terminal directly using standard error. */ - public final PrintWriter getErrorWriter() { - return errWriter; + /** + * Returns a line based OutputStream wrapping this Terminal's println. + * Note, this OutputStream is not thread-safe! + */ + public final OutputStream asLineOutputStream(Charset charset) { + return new LineOutputStream(charset); } /** @@ -138,7 +143,7 @@ public InputStream getInputStream() { * Returns an OutputStream which can be used to write to the terminal directly using standard output. * *

May return {@code null} if this Terminal is not capable of binary output. - * This corresponds with the underlying stream of bytes written to by {@link #getWriter()}. + * This corresponds with the underlying stream of bytes written to by {@link #println(CharSequence)}. */ @Nullable public OutputStream getOutputStream() { @@ -152,12 +157,12 @@ public final void println(CharSequence msg) { /** Prints a line to the terminal at {@code verbosity} level. */ public final void println(Verbosity verbosity, CharSequence msg) { - print(verbosity, outWriter, msg, true); + print(verbosity, outWriter, msg, true, true); } /** Prints message to the terminal's standard output at {@code verbosity} level, without a newline. */ public final void print(Verbosity verbosity, String msg) { - print(verbosity, outWriter, msg, false); + print(verbosity, outWriter, msg, false, true); } /** @@ -165,30 +170,49 @@ public final void print(Verbosity verbosity, String msg) { * * Subclasses may override if the writers are not implemented. */ - protected void print(Verbosity verbosity, PrintWriter writer, CharSequence msg, boolean newline) { + protected void print(Verbosity verbosity, PrintWriter writer, CharSequence msg, boolean newline, boolean flush) { if (isPrintable(verbosity)) { if (newline) { writer.println(msg); } else { writer.print(msg); } - writer.flush(); + if (flush) { + writer.flush(); + } } } /** Prints a line to the terminal's standard error at {@link Verbosity#NORMAL} verbosity level, without a newline. */ public final void errorPrint(Verbosity verbosity, String msg) { - print(verbosity, errWriter, msg, false); + print(verbosity, errWriter, msg, false, true); } /** Prints a line to the terminal's standard error at {@link Verbosity#NORMAL} verbosity level. */ public final void errorPrintln(String msg) { - errorPrintln(Verbosity.NORMAL, msg); + print(Verbosity.NORMAL, errWriter, msg, true, true); } /** Prints a line to the terminal's standard error at {@code verbosity} level. 
*/ public final void errorPrintln(Verbosity verbosity, String msg) { - print(verbosity, errWriter, msg, true); + print(verbosity, errWriter, msg, true, true); + } + + /** Prints a line to the terminal's standard error at {@code verbosity} level, with an optional flush */ + public final void errorPrintln(Verbosity verbosity, String msg, boolean flush) { + print(verbosity, errWriter, msg, true, flush); + } + + /** Prints a stacktrace to the terminal's standard error at {@code verbosity} level. */ + public void errorPrintln(Verbosity verbosity, Throwable throwable) { + if (isPrintable(verbosity)) { + throwable.printStackTrace(errWriter); + } + } + + /** Prints a stacktrace to the terminal's standard error at {@link Verbosity#SILENT} verbosity level. */ + public void errorPrintln(Throwable throwable) { + errorPrintln(Verbosity.SILENT, throwable); } /** Checks if is enough {@code verbosity} level to be printed */ @@ -339,4 +363,54 @@ public OutputStream getOutputStream() { return System.out; } } + + /** A line based OutputStream wrapping this Terminal's println, not thread-safe! */ + private class LineOutputStream extends OutputStream { + static final int DEFAULT_BUFFER_LENGTH = 1024; + static final int MAX_BUFFER_LENGTH = DEFAULT_BUFFER_LENGTH * 8; + + private final Charset charset; + private byte[] bytes = new byte[DEFAULT_BUFFER_LENGTH]; + private int count = 0; + + LineOutputStream(Charset charset) { + this.charset = charset; + } + + @Override + public void write(int b) { + if (b == 0) return; + if (b == '\n') { + flush(true); + return; + } + if (count == bytes.length) { + if (count >= MAX_BUFFER_LENGTH) { + flush(false); + } else { + bytes = Arrays.copyOf(bytes, 2 * bytes.length); + } + } + bytes[count++] = (byte) b; + } + + private void flush(boolean newline) { + if (newline && count > 0 && bytes[count - 1] == '\r') { + --count; // drop CR on windows as well + } + String msg = count > 0 ? 
new String(bytes, 0, count, charset) : ""; + print(Verbosity.NORMAL, outWriter, msg, newline, true); + count = 0; + if (bytes.length > DEFAULT_BUFFER_LENGTH) { + bytes = new byte[DEFAULT_BUFFER_LENGTH]; + } + } + + @Override + public void flush() { + if (count > 0) { + flush(false); + } + } + } } diff --git a/libs/cli/src/test/java/org/elasticsearch/cli/TerminalTests.java b/libs/cli/src/test/java/org/elasticsearch/cli/TerminalTests.java index 9c1faf911a829..dffb93ebbf230 100644 --- a/libs/cli/src/test/java/org/elasticsearch/cli/TerminalTests.java +++ b/libs/cli/src/test/java/org/elasticsearch/cli/TerminalTests.java @@ -11,6 +11,17 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase.WithoutSecurityManager; +import java.io.IOException; +import java.io.OutputStream; +import java.io.PrintWriter; +import java.nio.charset.StandardCharsets; + +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; + @WithoutSecurityManager public class TerminalTests extends ESTestCase { @@ -20,4 +31,33 @@ public void testSystemTerminalIfRedirected() { // Otherwise, JDK 22 doesn't provide a console if redirected. 
assertEquals(Terminal.SystemTerminal.class, Terminal.DEFAULT.getClass()); } + + public void testTerminalAsLineOutputStream() throws IOException { + PrintWriter stdOut = mock("stdOut"); + PrintWriter stdErr = mock("stdErr"); + + OutputStream out = new Terminal(mock("reader"), stdOut, stdErr) { + }.asLineOutputStream(StandardCharsets.UTF_8); + + out.write("123".getBytes(StandardCharsets.UTF_8)); + out.write("456".getBytes(StandardCharsets.UTF_8)); + out.write("789\r\n".getBytes(StandardCharsets.UTF_8)); // CR is removed as well + + verify(stdOut).println(eq((CharSequence) "123456789")); + verify(stdOut).flush(); + verifyNoMoreInteractions(stdOut, stdErr); + + out.write("\n".getBytes(StandardCharsets.UTF_8)); + verify(stdOut).println(eq((CharSequence) "")); + verify(stdOut, times(2)).flush(); + verifyNoMoreInteractions(stdOut, stdErr); + + out.write("a".getBytes(StandardCharsets.UTF_8)); + out.flush(); + verify(stdOut).print(eq((CharSequence) "a")); + verify(stdOut, times(3)).flush(); + + out.flush(); + verifyNoMoreInteractions(stdOut, stdErr); + } } diff --git a/libs/core/src/main/java/org/elasticsearch/core/ReleasableIterator.java b/libs/core/src/main/java/org/elasticsearch/core/ReleasableIterator.java index 68a4a136c5308..83a68c984a684 100644 --- a/libs/core/src/main/java/org/elasticsearch/core/ReleasableIterator.java +++ b/libs/core/src/main/java/org/elasticsearch/core/ReleasableIterator.java @@ -46,4 +46,30 @@ public String toString() { }; } + + /** + * Returns an empty iterator over the supplied value. 
+ */ + static ReleasableIterator empty() { + return new ReleasableIterator<>() { + @Override + public boolean hasNext() { + return false; + } + + @Override + public T next() { + assert false : "hasNext is always false so next should never be called"; + return null; + } + + @Override + public void close() {} + + @Override + public String toString() { + return "ReleasableIterator[]"; + } + }; + } } diff --git a/libs/core/src/main/java/org/elasticsearch/core/RestApiVersion.java b/libs/core/src/main/java/org/elasticsearch/core/RestApiVersion.java index 5153ba688d6a9..74acb00925e5a 100644 --- a/libs/core/src/main/java/org/elasticsearch/core/RestApiVersion.java +++ b/libs/core/src/main/java/org/elasticsearch/core/RestApiVersion.java @@ -61,4 +61,15 @@ public static Predicate onOrAfter(RestApiVersion restApiVersion) }; } + public static RestApiVersion forMajor(int major) { + switch (major) { + case 7 -> { + return V_7; + } + case 8 -> { + return V_8; + } + default -> throw new IllegalArgumentException("Unknown REST API version " + major); + } + } } diff --git a/libs/native/libraries/build.gradle b/libs/native/libraries/build.gradle index 168eb533fea74..7a545787bbdae 100644 --- a/libs/native/libraries/build.gradle +++ b/libs/native/libraries/build.gradle @@ -18,7 +18,7 @@ configurations { } var zstdVersion = "1.5.5" -var vecVersion = "1.0.6" +var vecVersion = "1.0.8" repositories { exclusiveContent { diff --git a/libs/native/src/main/java/org/elasticsearch/nativeaccess/PosixNativeAccess.java b/libs/native/src/main/java/org/elasticsearch/nativeaccess/PosixNativeAccess.java index 56017d3a8a20a..c390cfc9289c6 100644 --- a/libs/native/src/main/java/org/elasticsearch/nativeaccess/PosixNativeAccess.java +++ b/libs/native/src/main/java/org/elasticsearch/nativeaccess/PosixNativeAccess.java @@ -45,7 +45,15 @@ public Optional getVectorSimilarityFunctions() { } static boolean isNativeVectorLibSupported() { - return Runtime.version().feature() >= 21 && isMacOrLinuxAarch64() && 
checkEnableSystemProperty(); + return Runtime.version().feature() >= 21 && (isMacOrLinuxAarch64() || isLinuxAmd64()) && checkEnableSystemProperty(); + } + + /** + * Returns true iff the architecture is x64 (amd64) and the OS Linux (the OS we currently support for the native lib). + */ + static boolean isLinuxAmd64() { + String name = System.getProperty("os.name"); + return (name.startsWith("Linux")) && System.getProperty("os.arch").equals("amd64"); } /** Returns true iff the OS is Mac or Linux, and the architecture is aarch64. */ diff --git a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkSystemdLibrary.java b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkSystemdLibrary.java index 5313984ac6d61..0af87154960ad 100644 --- a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkSystemdLibrary.java +++ b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkSystemdLibrary.java @@ -17,7 +17,10 @@ import java.lang.foreign.MemorySegment; import java.lang.invoke.MethodHandle; import java.nio.file.Files; +import java.nio.file.Path; import java.nio.file.Paths; +import java.util.Arrays; +import java.util.List; import static java.lang.foreign.ValueLayout.ADDRESS; import static java.lang.foreign.ValueLayout.JAVA_INT; @@ -26,31 +29,49 @@ class JdkSystemdLibrary implements SystemdLibrary { static { - System.load(findLibSystemd()); - } - - // On some systems libsystemd does not have a non-versioned symlink. System.loadLibrary only knows how to find - // non-versioned library files. So we must manually check the library path to find what we need. - static String findLibSystemd() { - final String libsystemd = "libsystemd.so.0"; - String libpath = System.getProperty("java.library.path"); - for (String basepathStr : libpath.split(":")) { - var basepath = Paths.get(basepathStr); - if (Files.exists(basepath) == false) { - continue; + // Find and load libsystemd. 
We attempt all instances of + // libsystemd in case of multiarch systems, and stop when + // one is successfully loaded. If none can be loaded, + // UnsatisfiedLinkError will be thrown. + List paths = findLibSystemd(); + if (paths.isEmpty()) { + String libpath = System.getProperty("java.library.path"); + throw new UnsatisfiedLinkError("Could not find libsystemd in java.library.path: " + libpath); + } + UnsatisfiedLinkError last = null; + for (String path : paths) { + try { + System.load(path); + last = null; + break; + } catch (UnsatisfiedLinkError e) { + last = e; } - try (var stream = Files.walk(basepath)) { + } + if (last != null) { + throw last; + } + } - var foundpath = stream.filter(Files::isDirectory).map(p -> p.resolve(libsystemd)).filter(Files::exists).findAny(); - if (foundpath.isPresent()) { - return foundpath.get().toAbsolutePath().toString(); - } + // findLibSystemd returns a list of paths to instances of libsystemd + // found within java.library.path. + static List findLibSystemd() { + // Note: on some systems libsystemd does not have a non-versioned symlink. + // System.loadLibrary only knows how to find non-versioned library files, + // so we must manually check the library path to find what we need. 
+ final Path libsystemd = Paths.get("libsystemd.so.0"); + final String libpath = System.getProperty("java.library.path"); + return Arrays.stream(libpath.split(":")).map(Paths::get).filter(Files::exists).flatMap(p -> { + try { + return Files.find( + p, + Integer.MAX_VALUE, + (fp, attrs) -> (attrs.isDirectory() == false && fp.getFileName().equals(libsystemd)) + ); } catch (IOException e) { throw new UncheckedIOException(e); } - - } - throw new UnsatisfiedLinkError("Could not find " + libsystemd + " in java.library.path: " + libpath); + }).map(p -> p.toAbsolutePath().toString()).toList(); } private static final MethodHandle sd_notify$mh = downcallHandle("sd_notify", FunctionDescriptor.of(JAVA_INT, JAVA_INT, ADDRESS)); diff --git a/libs/native/src/test/java/org/elasticsearch/nativeaccess/VectorSimilarityFunctionsTests.java b/libs/native/src/test/java/org/elasticsearch/nativeaccess/VectorSimilarityFunctionsTests.java index adf32874c04f1..8c4cbb688abcd 100644 --- a/libs/native/src/test/java/org/elasticsearch/nativeaccess/VectorSimilarityFunctionsTests.java +++ b/libs/native/src/test/java/org/elasticsearch/nativeaccess/VectorSimilarityFunctionsTests.java @@ -37,7 +37,9 @@ public boolean supported() { var arch = System.getProperty("os.arch"); var osName = System.getProperty("os.name"); - if (jdkVersion >= 21 && arch.equals("aarch64") && (osName.startsWith("Mac") || osName.equals("Linux"))) { + if (jdkVersion >= 21 + && ((arch.equals("aarch64") && (osName.startsWith("Mac") || osName.equals("Linux"))) + || (arch.equals("amd64") && osName.equals("Linux")))) { assertThat(vectorSimilarityFunctions, isPresent()); return true; } else { diff --git a/libs/vec/native/Dockerfile b/libs/vec/native/Dockerfile index 25dcf4d4854d0..66eb7e92ef479 100644 --- a/libs/vec/native/Dockerfile +++ b/libs/vec/native/Dockerfile @@ -4,6 +4,7 @@ RUN apt update RUN apt install -y gcc g++ openjdk-17-jdk COPY . 
/workspace WORKDIR /workspace -RUN ./gradlew --quiet --console=plain clean vecSharedLibrary +RUN ./gradlew --quiet --console=plain clean buildSharedLibrary +RUN strip --strip-unneeded build/output/libvec.so -CMD cat build/libs/vec/shared/libvec.so +CMD cat build/output/libvec.so diff --git a/libs/vec/native/build.gradle b/libs/vec/native/build.gradle index 6a658da0644b7..7edf46d406862 100644 --- a/libs/vec/native/build.gradle +++ b/libs/vec/native/build.gradle @@ -12,9 +12,10 @@ var os = org.gradle.internal.os.OperatingSystem.current() // To update this library run publish_vec_binaries.sh ( or ./gradlew vecSharedLibrary ) // Or // For local development, build the docker image with: -// docker build --platform linux/arm64 --progress=plain . +// docker build --platform linux/arm64 --progress=plain . (for aarch64) +// docker build --platform linux/amd64 --progress=plain . (for x64) // Grab the image id from the console output, then, e.g. -// docker run 9c9f36564c148b275aeecc42749e7b4580ded79dcf51ff6ccc008c8861e7a979 > build/libs/vec/shared/libvec.so +// docker run 9c9f36564c148b275aeecc42749e7b4580ded79dcf51ff6ccc008c8861e7a979 > build/libs/vec/shared/$arch/libvec.so // // To run tests and benchmarks on a locally built libvec, // 1. 
Temporarily comment out the download in libs/native/library/build.gradle @@ -30,26 +31,83 @@ var os = org.gradle.internal.os.OperatingSystem.current() group = 'org.elasticsearch' +def platformName = System.getProperty("os.arch"); + model { + platforms { + aarch64 { + architecture "aarch64" + } + amd64 { + architecture "x86-64" + } + } toolChains { gcc(Gcc) { target("aarch64") { cCompiler.executable = "/usr/bin/gcc" + cCompiler.withArguments { args -> args.addAll(["-O3", "-std=c99", "-march=armv8-a"]) } + } + target("amd64") { + cCompiler.executable = "/usr/bin/gcc" + cCompiler.withArguments { args -> args.addAll(["-O3", "-std=c99", "-march=core-avx2", "-Wno-incompatible-pointer-types"]) } } } - clang(Clang) - } - platforms { - aarch64 { - architecture "aarch64" + cl(VisualCpp) { + eachPlatform { toolchain -> + def platform = toolchain.getPlatform() + if (platform.name == "x64") { + cCompiler.withArguments { args -> args.addAll(["/O2", "/LD", "-march=core-avx2"]) } + } + } + } + clang(Clang) { + target("amd64") { + cCompiler.withArguments { args -> args.addAll(["-O3", "-std=c99", "-march=core-avx2"]) } + } } } components { vec(NativeLibrarySpec) { targetPlatform "aarch64" - binaries.withType(SharedLibraryBinarySpec) { - cCompiler.args "-O3", "-std=c99", "-march=armv8-a" + targetPlatform "amd64" + + sources { + c { + source { + srcDir "src/vec/c/${platformName}/" + include "*.c" + } + exportedHeaders { + srcDir "src/vec/headers/" + } + } + } + } + } +} + +tasks.register('buildSharedLibrary') { + description = 'Assembles native shared library for the host architecture' + if (platformName.equals("aarch64")) { + dependsOn tasks.vecAarch64SharedLibrary + doLast { + copy { + from tasks.linkVecAarch64SharedLibrary.outputs.files.files + into layout.buildDirectory.dir('output'); + duplicatesStrategy = 'INCLUDE' + } + } + } else if (platformName.equals("amd64")) { + dependsOn tasks.vecAmd64SharedLibrary + doLast { + copy { + from 
tasks.linkVecAmd64SharedLibrary.outputs.files.files + into layout.buildDirectory.dir('output'); + duplicatesStrategy = 'INCLUDE' } } + } else { + throw new GradleException("Unsupported platform: " + platformName) } } diff --git a/libs/vec/native/publish_vec_binaries.sh b/libs/vec/native/publish_vec_binaries.sh index e17690160e253..2ed6c750ab9e8 100755 --- a/libs/vec/native/publish_vec_binaries.sh +++ b/libs/vec/native/publish_vec_binaries.sh @@ -19,7 +19,7 @@ if [ -z "$ARTIFACTORY_API_KEY" ]; then exit 1; fi -VERSION="1.0.6" +VERSION="1.0.8" ARTIFACTORY_REPOSITORY="${ARTIFACTORY_REPOSITORY:-https://artifactory.elastic.dev/artifactory/elasticsearch-native/}" TEMP=$(mktemp -d) @@ -29,16 +29,22 @@ if curl -sS -I --fail --location "${ARTIFACTORY_REPOSITORY}/org/elasticsearch/ve fi echo 'Building Darwin binary...' -./gradlew --quiet --console=plain vecSharedLibrary +./gradlew --quiet --console=plain vecAarch64SharedLibrary echo 'Building Linux binary...' DOCKER_IMAGE=$(docker build --platform linux/arm64 --quiet .) -docker run $DOCKER_IMAGE > build/libs/vec/shared/libvec.so +docker run $DOCKER_IMAGE > build/libs/vec/shared/aarch64/libvec.so + +echo 'Building Linux x64 binary...' +DOCKER_IMAGE=$(docker build --platform linux/amd64 --quiet .) +docker run --platform linux/amd64 $DOCKER_IMAGE > build/libs/vec/shared/amd64/libvec.so mkdir -p $TEMP/darwin-aarch64 mkdir -p $TEMP/linux-aarch64 -cp build/libs/vec/shared/libvec.dylib $TEMP/darwin-aarch64/ -cp build/libs/vec/shared/libvec.so $TEMP/linux-aarch64/ +mkdir -p $TEMP/linux-x64 +cp build/libs/vec/shared/aarch64/libvec.dylib $TEMP/darwin-aarch64/ +cp build/libs/vec/shared/aarch64/libvec.so $TEMP/linux-aarch64/ +cp build/libs/vec/shared/amd64/libvec.so $TEMP/linux-x64/ echo 'Uploading to Artifactory...' (cd $TEMP && zip -rq - .) 
| curl -sS -X PUT -H "X-JFrog-Art-Api: ${ARTIFACTORY_API_KEY}" --data-binary @- --location "${ARTIFACTORY_REPOSITORY}/org/elasticsearch/vec/${VERSION}/vec-${VERSION}.zip" diff --git a/libs/vec/native/src/vec/c/vec.c b/libs/vec/native/src/vec/c/aarch64/vec.c similarity index 99% rename from libs/vec/native/src/vec/c/vec.c rename to libs/vec/native/src/vec/c/aarch64/vec.c index 05dfe64a3be9b..478e5e84d3859 100644 --- a/libs/vec/native/src/vec/c/vec.c +++ b/libs/vec/native/src/vec/c/aarch64/vec.c @@ -121,7 +121,7 @@ static inline int32_t sqr7u_inner(int8_t *a, int8_t *b, size_t dims) { EXPORT int32_t sqr7u(int8_t* a, int8_t* b, size_t dims) { int32_t res = 0; int i = 0; - if (i > SQR7U_STRIDE_BYTES_LEN) { + if (dims > SQR7U_STRIDE_BYTES_LEN) { i += dims & ~(SQR7U_STRIDE_BYTES_LEN - 1); res = sqr7u_inner(a, b, i); } diff --git a/libs/vec/native/src/vec/c/amd64/vec.c b/libs/vec/native/src/vec/c/amd64/vec.c new file mode 100644 index 0000000000000..c9a49ad2d1d4d --- /dev/null +++ b/libs/vec/native/src/vec/c/amd64/vec.c @@ -0,0 +1,150 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +#include +#include +#include "vec.h" + +#include +#include + +#ifndef DOT7U_STRIDE_BYTES_LEN +#define DOT7U_STRIDE_BYTES_LEN 32 // Must be a power of 2 +#endif + +#ifndef SQR7U_STRIDE_BYTES_LEN +#define SQR7U_STRIDE_BYTES_LEN 32 // Must be a power of 2 +#endif + +#ifdef _MSC_VER +#include +#elif __GNUC__ +#include +#elif __clang__ +#include +#endif + +// Multi-platform CPUID "intrinsic"; it takes as input a "functionNumber" (or "leaf", the eax registry). "Subleaf" +// is always 0. 
Output is stored in the passed output parameter: output[0] = eax, output[1] = ebx, output[2] = ecx, +// output[3] = edx +static inline void cpuid(int output[4], int functionNumber) { +#if defined(__GNUC__) || defined(__clang__) + // use inline assembly, Gnu/AT&T syntax + int a, b, c, d; + __asm("cpuid" : "=a"(a), "=b"(b), "=c"(c), "=d"(d) : "a"(functionNumber), "c"(0) : ); + output[0] = a; + output[1] = b; + output[2] = c; + output[3] = d; + +#elif defined (_MSC_VER) + __cpuidex(output, functionNumber, 0); +#else + #error Unsupported compiler +#endif +} + +// Utility function to horizontally add 8 32-bit integers +static inline int hsum_i32_8(const __m256i a) { + const __m128i sum128 = _mm_add_epi32(_mm256_castsi256_si128(a), _mm256_extractf128_si256(a, 1)); + const __m128i hi64 = _mm_unpackhi_epi64(sum128, sum128); + const __m128i sum64 = _mm_add_epi32(hi64, sum128); + const __m128i hi32 = _mm_shuffle_epi32(sum64, _MM_SHUFFLE(2, 3, 0, 1)); + return _mm_cvtsi128_si32(_mm_add_epi32(sum64, hi32)); +} + +EXPORT int vec_caps() { + int cpuInfo[4] = {-1}; + // Calling __cpuid with 0x0 as the function_id argument + // gets the number of the highest valid function ID. 
+ cpuid(cpuInfo, 0); + int functionIds = cpuInfo[0]; + if (functionIds >= 7) { + cpuid(cpuInfo, 7); + int ebx = cpuInfo[1]; + // AVX2 flag is the 5th bit + // We assume that all processors that have AVX2 also have FMA3 + return (ebx & (1 << 5)) != 0; + } + return 0; +} + +static inline int32_t dot7u_inner(int8_t* a, int8_t* b, size_t dims) { + const __m256i ones = _mm256_set1_epi16(1); + + // Init accumulator(s) with 0 + __m256i acc1 = _mm256_setzero_si256(); + +#pragma GCC unroll 4 + for(int i = 0; i < dims; i += DOT7U_STRIDE_BYTES_LEN) { + // Load packed 8-bit integers + __m256i va1 = _mm256_loadu_si256(a + i); + __m256i vb1 = _mm256_loadu_si256(b + i); + + // Perform multiplication and create 16-bit values + // Vertically multiply each unsigned 8-bit integer from va with the corresponding + // 8-bit integer from vb, producing intermediate signed 16-bit integers. + const __m256i vab = _mm256_maddubs_epi16(va1, vb1); + // Horizontally add adjacent pairs of intermediate signed 16-bit integers, and pack the results. 
+ acc1 = _mm256_add_epi32(_mm256_madd_epi16(ones, vab), acc1); + } + + // reduce (horizontally add all) + return hsum_i32_8(acc1); +} + +EXPORT int32_t dot7u(int8_t* a, int8_t* b, size_t dims) { + int32_t res = 0; + int i = 0; + if (dims > DOT7U_STRIDE_BYTES_LEN) { + i += dims & ~(DOT7U_STRIDE_BYTES_LEN - 1); + res = dot7u_inner(a, b, i); + } + for (; i < dims; i++) { + res += a[i] * b[i]; + } + return res; +} + +static inline int32_t sqr7u_inner(int8_t *a, int8_t *b, size_t dims) { + // Init accumulator(s) with 0 + __m256i acc1 = _mm256_setzero_si256(); + + const __m256i ones = _mm256_set1_epi16(1); + +#pragma GCC unroll 4 + for(int i = 0; i < dims; i += SQR7U_STRIDE_BYTES_LEN) { + // Load packed 8-bit integers + __m256i va1 = _mm256_loadu_si256(a + i); + __m256i vb1 = _mm256_loadu_si256(b + i); + + const __m256i dist1 = _mm256_sub_epi8(va1, vb1); + const __m256i abs_dist1 = _mm256_sign_epi8(dist1, dist1); + const __m256i sqr1 = _mm256_maddubs_epi16(abs_dist1, abs_dist1); + + acc1 = _mm256_add_epi32(_mm256_madd_epi16(ones, sqr1), acc1); + } + + // reduce (accumulate all) + return hsum_i32_8(acc1); +} + +EXPORT int32_t sqr7u(int8_t* a, int8_t* b, size_t dims) { + int32_t res = 0; + int i = 0; + if (dims > SQR7U_STRIDE_BYTES_LEN) { + i += dims & ~(SQR7U_STRIDE_BYTES_LEN - 1); + res = sqr7u_inner(a, b, i); + } + for (; i < dims; i++) { + int32_t dist = a[i] - b[i]; + res += dist * dist; + } + return res; +} + diff --git a/libs/vec/native/src/vec/headers/vec.h b/libs/vec/native/src/vec/headers/vec.h index 5d3806dfccbe6..49fa29ec6fae9 100644 --- a/libs/vec/native/src/vec/headers/vec.h +++ b/libs/vec/native/src/vec/headers/vec.h @@ -7,7 +7,7 @@ */ #ifdef _MSC_VER -#define EXPORT extern "C" __declspec(dllexport) +#define EXPORT __declspec(dllexport) #elif defined(__GNUC__) && !defined(__clang__) #define EXPORT __attribute__((externally_visible,visibility("default"))) #elif __clang__ diff --git 
a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/AbstractInt7ScalarQuantizedVectorScorer.java b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/AbstractInt7ScalarQuantizedVectorScorer.java index 2e60079da8649..2be0aa53f7c57 100644 --- a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/AbstractInt7ScalarQuantizedVectorScorer.java +++ b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/AbstractInt7ScalarQuantizedVectorScorer.java @@ -78,7 +78,7 @@ protected final void checkOrdinal(int ord) { } } - protected final float fallbackScore(int firstByteOffset, int secondByteOffset) throws IOException { + protected final float fallbackScore(long firstByteOffset, long secondByteOffset) throws IOException { input.seek(firstByteOffset); byte[] a = new byte[dims]; input.readBytes(a, 0, a.length); diff --git a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7DotProduct.java b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7DotProduct.java index f92bf0b52ed07..9b452219bd635 100644 --- a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7DotProduct.java +++ b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7DotProduct.java @@ -34,8 +34,8 @@ public float score(int firstOrd, int secondOrd) throws IOException { checkOrdinal(secondOrd); final int length = dims; - int firstByteOffset = firstOrd * (length + Float.BYTES); - int secondByteOffset = secondOrd * (length + Float.BYTES); + long firstByteOffset = (long) firstOrd * (length + Float.BYTES); + long secondByteOffset = (long) secondOrd * (length + Float.BYTES); MemorySegment firstSeg = segmentSlice(firstByteOffset, length); input.seek(firstByteOffset + length); diff --git a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7Euclidean.java b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7Euclidean.java index e1f16c6909cf4..55b08a899bd7c 100644 --- a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7Euclidean.java +++ 
b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7Euclidean.java @@ -34,8 +34,8 @@ public float score(int firstOrd, int secondOrd) throws IOException { checkOrdinal(secondOrd); final int length = dims; - int firstByteOffset = firstOrd * (length + Float.BYTES); - int secondByteOffset = secondOrd * (length + Float.BYTES); + long firstByteOffset = (long) firstOrd * (length + Float.BYTES); + long secondByteOffset = (long) secondOrd * (length + Float.BYTES); MemorySegment firstSeg = segmentSlice(firstByteOffset, length); MemorySegment secondSeg = segmentSlice(secondByteOffset, length); diff --git a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7MaximumInnerProduct.java b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7MaximumInnerProduct.java index bd6fc921f1832..5cdfc62bc9071 100644 --- a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7MaximumInnerProduct.java +++ b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7MaximumInnerProduct.java @@ -34,8 +34,8 @@ public float score(int firstOrd, int secondOrd) throws IOException { checkOrdinal(secondOrd); final int length = dims; - int firstByteOffset = firstOrd * (length + Float.BYTES); - int secondByteOffset = secondOrd * (length + Float.BYTES); + long firstByteOffset = (long) firstOrd * (length + Float.BYTES); + long secondByteOffset = (long) secondOrd * (length + Float.BYTES); MemorySegment firstSeg = segmentSlice(firstByteOffset, length); input.seek(firstByteOffset + length); diff --git a/libs/vec/src/test/java/org/elasticsearch/vec/AbstractVectorTestCase.java b/libs/vec/src/test/java/org/elasticsearch/vec/AbstractVectorTestCase.java index 771f665fb4084..13f2d5a03ec76 100644 --- a/libs/vec/src/test/java/org/elasticsearch/vec/AbstractVectorTestCase.java +++ b/libs/vec/src/test/java/org/elasticsearch/vec/AbstractVectorTestCase.java @@ -39,7 +39,9 @@ public static boolean supported() { var arch = System.getProperty("os.arch"); var osName = 
System.getProperty("os.name"); - if (jdkVersion >= 21 && arch.equals("aarch64") && (osName.startsWith("Mac") || osName.equals("Linux"))) { + if (jdkVersion >= 21 + && (arch.equals("aarch64") && (osName.startsWith("Mac") || osName.equals("Linux")) + || arch.equals("amd64") && osName.equals("Linux"))) { assertThat(factory, isPresent()); return true; } else { diff --git a/libs/vec/src/test/java/org/elasticsearch/vec/VectorScorerFactoryTests.java b/libs/vec/src/test/java/org/elasticsearch/vec/VectorScorerFactoryTests.java index 115cf8e8cf9f8..246ddaeb2ebcf 100644 --- a/libs/vec/src/test/java/org/elasticsearch/vec/VectorScorerFactoryTests.java +++ b/libs/vec/src/test/java/org/elasticsearch/vec/VectorScorerFactoryTests.java @@ -8,6 +8,8 @@ package org.elasticsearch.vec; +import com.carrotsearch.randomizedtesting.generators.RandomNumbers; + import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; @@ -17,6 +19,8 @@ import java.io.IOException; import java.util.Arrays; import java.util.List; +import java.util.Objects; +import java.util.Random; import java.util.function.Function; import static org.elasticsearch.vec.VectorSimilarityType.COSINE; @@ -226,6 +230,67 @@ void testRandomSliceImpl(int dims, long maxChunkSize, int initialPadding, Functi } } + // Tests with a large amount of data (> 2GB), which ensures that data offsets do not overflow + @Nightly + public void testLarge() throws IOException { + var factory = AbstractVectorTestCase.factory.get(); + + try (Directory dir = new MMapDirectory(createTempDir(getTestName()))) { + final int dims = 8192; + final int size = 262144; + final float correction = randomFloat(); + + String fileName = getTestName() + "-" + dims; + logger.info("Testing " + fileName); + try (IndexOutput out = dir.createOutput(fileName, IOContext.DEFAULT)) { + for (int i = 0; i < size; i++) { + var vec = vector(i, dims); + var off = (float) i; + out.writeBytes(vec, 0, vec.length); + 
out.writeInt(Float.floatToIntBits(off)); + } + } + try (IndexInput in = dir.openInput(fileName, IOContext.DEFAULT)) { + for (int times = 0; times < TIMES; times++) { + int idx0 = randomIntBetween(0, size - 1); + int idx1 = size - 1; + float off0 = (float) idx0; + float off1 = (float) idx1; + // dot product + float expected = luceneScore(DOT_PRODUCT, vector(idx0, dims), vector(idx1, dims), correction, off0, off1); + var scorer = factory.getInt7ScalarQuantizedVectorScorer(dims, size, correction, DOT_PRODUCT, in).get(); + assertThat(scorer.score(idx0, idx1), equalTo(expected)); + assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(idx0).score(idx1), equalTo(expected)); + // max inner product + expected = luceneScore(MAXIMUM_INNER_PRODUCT, vector(idx0, dims), vector(idx1, dims), correction, off0, off1); + scorer = factory.getInt7ScalarQuantizedVectorScorer(dims, size, correction, MAXIMUM_INNER_PRODUCT, in).get(); + assertThat(scorer.score(idx0, idx1), equalTo(expected)); + assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(idx0).score(idx1), equalTo(expected)); + // cosine + expected = luceneScore(COSINE, vector(idx0, dims), vector(idx1, dims), correction, off0, off1); + scorer = factory.getInt7ScalarQuantizedVectorScorer(dims, size, correction, COSINE, in).get(); + assertThat(scorer.score(idx0, idx1), equalTo(expected)); + assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(idx0).score(idx1), equalTo(expected)); + // euclidean + expected = luceneScore(EUCLIDEAN, vector(idx0, dims), vector(idx1, dims), correction, off0, off1); + scorer = factory.getInt7ScalarQuantizedVectorScorer(dims, size, correction, EUCLIDEAN, in).get(); + assertThat(scorer.score(idx0, idx1), equalTo(expected)); + assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(idx0).score(idx1), equalTo(expected)); + } + } + } + } + + // creates the vector based on the given ordinal, which is reproducible given the ord and dims + static byte[] vector(int ord, int dims) { + var 
random = new Random(Objects.hash(ord, dims)); + byte[] ba = new byte[dims]; + for (int i = 0; i < dims; i++) { + ba[i] = (byte) RandomNumbers.randomIntBetween(random, MIN_INT7_VALUE, MAX_INT7_VALUE); + } + return ba; + } + static Function BYTE_ARRAY_RANDOM_INT7_FUNC = size -> { byte[] ba = new byte[size]; randomBytesBetween(ba, MIN_INT7_VALUE, MAX_INT7_VALUE); diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/EcsLogsDataStreamIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/EcsLogsDataStreamIT.java index 3802d572e04dd..5fe72c38078ee 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/EcsLogsDataStreamIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/EcsLogsDataStreamIT.java @@ -201,12 +201,12 @@ public void testGeneralMockupEcsMappings() throws Exception { "host": { "cpu": { "usage": 0.68 - } - }, - "geo": { - "location": { - "lon": -73.614830, - "lat": 45.505918 + }, + "geo": { + "location": { + "lon": -73.614830, + "lat": 45.505918 + } } }, "data_stream": { @@ -414,7 +414,10 @@ public void testGeneralMockupEcsMappings() throws Exception { getValueFromPath(properties, List.of("host", "properties", "cpu", "properties", "usage", "scaling_factor")), is(1000.0) ); - assertThat(getValueFromPath(properties, List.of("geo", "properties", "location", "type")), is("geo_point")); + assertThat( + getValueFromPath(properties, List.of("host", "properties", "geo", "properties", "location", "type")), + is("geo_point") + ); assertThat(getValueFromPath(properties, List.of("data_stream", "properties", "dataset", "type")), is("constant_keyword")); assertThat(getValueFromPath(properties, List.of("data_stream", "properties", "namespace", "type")), is("constant_keyword")); assertThat(getValueFromPath(properties, List.of("data_stream", "properties", "type", "type")), is("constant_keyword")); diff --git 
a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java index 2370cca08b23e..79d33a95c4709 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java @@ -463,7 +463,6 @@ public void testNoSubobjects() throws Exception { { "@timestamp": "2023-06-12", "start_timestamp": "2023-06-08", - "location" : "POINT (-71.34 41.12)", "test": "flattened", "test.start_timestamp": "not a date", "test.start-timestamp": "not a date", @@ -497,7 +496,7 @@ public void testNoSubobjects() throws Exception { "vulnerability.score.version": "2.0", "vulnerability.textual_score": "bad", "host.cpu.usage": 0.68, - "geo.location": [-73.614830, 45.505918], + "host.geo.location": [-73.614830, 45.505918], "data_stream.dataset": "nginx.access", "data_stream.namespace": "production", "data_stream.custom": "whatever", @@ -521,8 +520,7 @@ public void testNoSubobjects() throws Exception { }, "fields": [ "data_stream.type", - "location", - "geo.location", + "host.geo.location", "test.start-timestamp", "test.start_timestamp", "vulnerability.textual_score" @@ -537,14 +535,9 @@ public void testNoSubobjects() throws Exception { // verify that data_stream.type has the correct constant_keyword value assertThat(fields.get("data_stream.type"), is(List.of("logs"))); // verify geo_point subfields evaluation - assertThat(((List>) fields.get("location")).get(0).get("type"), is("Point")); - List coordinates = ((List>>) fields.get("location")).get(0).get("coordinates"); - assertThat(coordinates.size(), is(2)); - assertThat(coordinates.get(0), equalTo(-71.34)); - assertThat(coordinates.get(1), equalTo(41.12)); - List geoLocation = (List) fields.get("geo.location"); + List geoLocation = (List) fields.get("host.geo.location"); 
assertThat(((Map) geoLocation.get(0)).get("type"), is("Point")); - coordinates = ((Map>) geoLocation.get(0)).get("coordinates"); + List coordinates = ((Map>) geoLocation.get(0)).get("coordinates"); assertThat(coordinates.size(), is(2)); assertThat(coordinates.get(0), equalTo(-73.614830)); assertThat(coordinates.get(1), equalTo(45.505918)); @@ -612,8 +605,7 @@ public void testNoSubobjects() throws Exception { assertThat(getValueFromPath(properties, List.of("vulnerability.textual_score", "type")), is("float")); assertThat(getValueFromPath(properties, List.of("host.cpu.usage", "type")), is("scaled_float")); assertThat(getValueFromPath(properties, List.of("host.cpu.usage", "scaling_factor")), is(1000.0)); - assertThat(getValueFromPath(properties, List.of("location", "type")), is("geo_point")); - assertThat(getValueFromPath(properties, List.of("geo.location", "type")), is("geo_point")); + assertThat(getValueFromPath(properties, List.of("host.geo.location", "type")), is("geo_point")); assertThat(getValueFromPath(properties, List.of("data_stream.dataset", "type")), is("constant_keyword")); assertThat(getValueFromPath(properties, List.of("data_stream.namespace", "type")), is("constant_keyword")); assertThat(getValueFromPath(properties, List.of("data_stream.type", "type")), is("constant_keyword")); diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java index c04dffe82b3cf..0a423cb375e88 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import 
org.elasticsearch.gateway.GatewayService; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; @@ -341,12 +342,15 @@ && hasAtLeastOneGeoipProcessor( ); } + @UpdateForV9 // use MINUS_ONE once that means no timeout + private static final TimeValue MASTER_TIMEOUT = TimeValue.MAX_VALUE; + private void startTask(Runnable onFailure) { persistentTasksService.sendStartRequest( GEOIP_DOWNLOADER, GEOIP_DOWNLOADER, new GeoIpTaskParams(), - null, + MASTER_TIMEOUT, ActionListener.wrap(r -> logger.debug("Started geoip downloader task"), e -> { Throwable t = e instanceof RemoteTransportException ? ExceptionsHelper.unwrapCause(e) : e; if (t instanceof ResourceAlreadyExistsException == false) { @@ -368,7 +372,7 @@ private void stopTask(Runnable onFailure) { } } ); - persistentTasksService.sendRemoveRequest(GEOIP_DOWNLOADER, null, ActionListener.runAfter(listener, () -> { + persistentTasksService.sendRemoveRequest(GEOIP_DOWNLOADER, MASTER_TIMEOUT, ActionListener.runAfter(listener, () -> { IndexAbstraction databasesAbstraction = clusterService.state().metadata().getIndicesLookup().get(DATABASES_INDEX); if (databasesAbstraction != null) { // regardless of whether DATABASES_INDEX is an alias, resolve it to a concrete index diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java index e202d99218144..d2be4212cf41e 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java @@ -568,11 +568,9 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th } }; // there's only one netty worker thread that's reused across client requests - Settings settings = Settings.builder() - 
.put(Netty4Plugin.WORKER_COUNT.getKey(), 1) + Settings settings = createBuilderWithPort().put(Netty4Plugin.WORKER_COUNT.getKey(), 1) .put(Netty4Plugin.SETTING_HTTP_WORKER_COUNT.getKey(), 0) .build(); - NioEventLoopGroup group = new NioEventLoopGroup(); AtomicBoolean acceptChannel = new AtomicBoolean(); try ( Netty4HttpServerTransport transport = new Netty4HttpServerTransport( @@ -601,9 +599,9 @@ public boolean test(String profile, InetSocketAddress peerAddress) { ) { transport.start(); int nRetries = randomIntBetween(7, 9); - for (int i = 0; i < nRetries; i++) { - acceptChannel.set(randomBoolean()); - try (Netty4HttpClient client = new Netty4HttpClient()) { + try (Netty4HttpClient client = new Netty4HttpClient()) { + for (int i = 0; i < nRetries; i++) { + acceptChannel.set(randomBoolean()); var responses = client.get(randomFrom(transport.boundAddress().boundAddresses()).address(), "/test/url"); try { if (acceptChannel.get()) { @@ -619,8 +617,6 @@ public boolean test(String profile, InetSocketAddress peerAddress) { } } } - } finally { - group.shutdownGracefully().await(); } } diff --git a/muted-tests.yml b/muted-tests.yml index 341d127c7b64a..210215a131339 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -1,4 +1,6 @@ tests: +- class: "org.elasticsearch.xpack.transform.transforms.scheduling.MonotonicClockTests" + issue: "https://github.com/elastic/elasticsearch/issues/108529" # Examples: # # Mute a single test case in a YAML test suite: diff --git a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java index 81b3a086e9aca..08e3ac2cbce8c 100644 --- a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java +++ b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java @@ -39,8 +39,10 @@ import java.util.concurrent.TimeUnit; import java.util.function.Function; 
+import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; /** * Create a simple "daemon controller", put it in the right place and check that it runs. @@ -64,18 +66,19 @@ public class SpawnerNoBootstrapTests extends LuceneTestCase { static { // normally done by ESTestCase, but need here because spawner depends on logging LogConfigurator.loadLog4jPlugins(); + MockLogAppender.init(); } static class ExpectedStreamMessage implements MockLogAppender.LoggingExpectation { final String expectedLogger; final String expectedMessage; - final CountDownLatch matchCalledLatch; - boolean saw; + final CountDownLatch matched; + volatile boolean saw; - ExpectedStreamMessage(String logger, String message, CountDownLatch matchCalledLatch) { + ExpectedStreamMessage(String logger, String message, CountDownLatch matched) { this.expectedLogger = logger; this.expectedMessage = message; - this.matchCalledLatch = matchCalledLatch; + this.matched = matched; } @Override @@ -84,8 +87,8 @@ public void match(LogEvent event) { && event.getLevel().equals(Level.WARN) && event.getMessage().getFormattedMessage().equals(expectedMessage)) { saw = true; + matched.countDown(); } - matchCalledLatch.countDown(); } @Override @@ -129,7 +132,7 @@ public void testNoControllerSpawn() throws IOException { try (Spawner spawner = new Spawner()) { spawner.spawnNativeControllers(environment); - assertThat(spawner.getProcesses(), hasSize(0)); + assertThat(spawner.getProcesses(), is(empty())); } } @@ -228,7 +231,7 @@ private void assertControllerSpawns(final Function pluginsDir // fail if the process does not die within one second; usually it will be even quicker but it depends on OS scheduling assertTrue(process.waitFor(1, TimeUnit.SECONDS)); } else { - assertThat(processes, hasSize(0)); + assertThat(processes, is(empty())); } appender.assertAllExpectationsMatched(); } diff --git 
a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java index b1240747b1a67..dc4e24959a5c6 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java @@ -1211,6 +1211,7 @@ private List listPlugins() { /** * Check that readiness listener works */ + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/108523") public void test500Readiness() throws Exception { assertFalse(readinessProbe(9399)); // Disabling security so we wait for green diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Archives.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Archives.java index ecc043906bd1a..787069eb2605c 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Archives.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Archives.java @@ -264,7 +264,7 @@ public static Shell.Result startElasticsearchWithTty( Locale.ROOT, """ expect - <() { + @Override + @SuppressForbidden(reason = "TemporaryFolder only has io.File methods, not nio.File") + public String get() { + return repoDirectory.getRoot().getPath(); + } + }) + .setting("xpack.security.enabled", "false") + .feature(FeatureFlag.TIME_SERIES_MODE) + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(repoDirectory).around(cluster); + + protected AbstractRollingUpgradeTestCase(@Name("upgradedNodes") int upgradedNodes) { + super(upgradedNodes); + } + + @Override + protected ElasticsearchCluster getUpgradeCluster() { + return cluster; + } +} diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ClusterFeatureMigrationIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ClusterFeatureMigrationIT.java index 0487b282179a9..73abb634dfd76 100644 --- 
a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ClusterFeatureMigrationIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ClusterFeatureMigrationIT.java @@ -24,7 +24,7 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasSize; -public class ClusterFeatureMigrationIT extends ParameterizedRollingUpgradeTestCase { +public class ClusterFeatureMigrationIT extends AbstractRollingUpgradeTestCase { @Before public void checkMigrationVersion() { diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java index 73d91ac41fcb7..c7f99b3525f74 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java @@ -33,7 +33,7 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; -public class DesiredNodesUpgradeIT extends ParameterizedRollingUpgradeTestCase { +public class DesiredNodesUpgradeIT extends AbstractRollingUpgradeTestCase { private final int desiredNodesVersion; diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java index 757f793ac4c46..488cd966ed65e 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java @@ -25,7 +25,7 @@ import static org.hamcrest.Matchers.equalTo; -public class DownsampleIT extends ParameterizedRollingUpgradeTestCase { +public class DownsampleIT extends AbstractRollingUpgradeTestCase { private static final String FIXED_INTERVAL = "1h"; private String index; diff --git 
a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FeatureUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FeatureUpgradeIT.java index 4fe45c05b157b..fc77eef0ae8bb 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FeatureUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FeatureUpgradeIT.java @@ -23,7 +23,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; -public class FeatureUpgradeIT extends ParameterizedRollingUpgradeTestCase { +public class FeatureUpgradeIT extends AbstractRollingUpgradeTestCase { public FeatureUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FieldCapsIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FieldCapsIT.java index 860cd2c0e8617..306447d8cc2cd 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FieldCapsIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FieldCapsIT.java @@ -40,7 +40,7 @@ * the co-ordinating node if older nodes were included in the system */ @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103473") -public class FieldCapsIT extends ParameterizedRollingUpgradeTestCase { +public class FieldCapsIT extends AbstractRollingUpgradeTestCase { public FieldCapsIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java index 0f210ee4b2450..6647cb413c9f5 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java +++ 
b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java @@ -20,7 +20,7 @@ import static org.hamcrest.CoreMatchers.equalTo; -public class HealthNodeUpgradeIT extends ParameterizedRollingUpgradeTestCase { +public class HealthNodeUpgradeIT extends AbstractRollingUpgradeTestCase { public HealthNodeUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IgnoredMetaFieldRollingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IgnoredMetaFieldRollingUpgradeIT.java index 874fac615b9b1..1477e2b63cf03 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IgnoredMetaFieldRollingUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IgnoredMetaFieldRollingUpgradeIT.java @@ -26,7 +26,7 @@ import java.util.Locale; import java.util.Map; -public class IgnoredMetaFieldRollingUpgradeIT extends ParameterizedRollingUpgradeTestCase { +public class IgnoredMetaFieldRollingUpgradeIT extends AbstractRollingUpgradeTestCase { private static final String TERMS_AGG_QUERY = Strings.format(""" { diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java index 82485130f05ce..157e2293b69ae 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java @@ -51,7 +51,7 @@ * xpack rolling restart tests. We should work on a way to remove this * duplication but for now we have no real way to share code. 
*/ -public class IndexingIT extends ParameterizedRollingUpgradeTestCase { +public class IndexingIT extends AbstractRollingUpgradeTestCase { public IndexingIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/NodesCapabilitiesUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/NodesCapabilitiesUpgradeIT.java new file mode 100644 index 0000000000000..2acaf33c2130c --- /dev/null +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/NodesCapabilitiesUpgradeIT.java @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.upgrades; + +import com.carrotsearch.randomizedtesting.annotations.Name; + +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.core.UpdateForV9; +import org.junit.Before; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresentWith; + +@UpdateForV9 +public class NodesCapabilitiesUpgradeIT extends AbstractRollingUpgradeTestCase { + + private static Boolean upgradingBeforeCapabilities; + + public NodesCapabilitiesUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { + super(upgradedNodes); + } + + @Before + public void checkBeforeHasNoCapabilities() throws IOException { + if (upgradingBeforeCapabilities == null) { + // try to do a _capabilities query on a node before we upgrade + try { + clusterHasCapability("GET", "_capabilities", List.of(), List.of()); + upgradingBeforeCapabilities = false; + } catch (ResponseException e) { + if 
(e.getResponse().getStatusLine().getStatusCode() == 400) { + upgradingBeforeCapabilities = true; + } else { + throw e; + } + } + } + + assumeTrue("Only valid when upgrading from versions without capabilities API", upgradingBeforeCapabilities); + } + + public void testCapabilitiesReturnsFalsePartiallyUpgraded() throws IOException { + if (isMixedCluster()) { + // capabilities checks should either fail (if talking to an old node), + // or return false as not all nodes have the API (if talking to a new node) + try { + assertThat( + "Upgraded node should report no capabilities supported", + clusterHasCapability("GET", "_capabilities", List.of(), List.of()), + isPresentWith(false) + ); + } catch (ResponseException e) { + if (e.getResponse().getStatusLine().getStatusCode() != 400) { + // throw explicitly to capture exception too + throw new AssertionError("Old node should not have the capabilities API", e); + } + } + } + } +} diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java index 63ed54d05adf2..d5f645c387d61 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java @@ -14,74 +14,45 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.test.cluster.FeatureFlag; -import org.elasticsearch.test.cluster.local.distribution.DistributionType; import 
org.elasticsearch.test.cluster.util.Version; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.ObjectPath; import org.elasticsearch.test.rest.TestFeatureService; import org.junit.AfterClass; import org.junit.Before; -import org.junit.ClassRule; -import org.junit.rules.RuleChain; -import org.junit.rules.TemporaryFolder; -import org.junit.rules.TestRule; import java.util.HashSet; import java.util.Map; import java.util.Set; -import java.util.function.Supplier; import java.util.stream.IntStream; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; public abstract class ParameterizedRollingUpgradeTestCase extends ESRestTestCase { + protected static final int NODE_NUM = 3; private static final String OLD_CLUSTER_VERSION = System.getProperty("tests.old_cluster_version"); - - private static final TemporaryFolder repoDirectory = new TemporaryFolder(); - - private static final int NODE_NUM = 3; - - private static final ElasticsearchCluster cluster = ElasticsearchCluster.local() - .distribution(DistributionType.DEFAULT) - .version(getOldClusterTestVersion()) - .nodes(NODE_NUM) - .setting("path.repo", new Supplier<>() { - @Override - @SuppressForbidden(reason = "TemporaryFolder only has io.File methods, not nio.File") - public String get() { - return repoDirectory.getRoot().getPath(); - } - }) - .setting("xpack.security.enabled", "false") - .feature(FeatureFlag.TIME_SERIES_MODE) - .build(); - - @ClassRule - public static TestRule ruleChain = RuleChain.outerRule(repoDirectory).around(cluster); - - @ParametersFactory(shuffle = false) - public static Iterable parameters() { - return IntStream.rangeClosed(0, NODE_NUM).boxed().map(n -> new Object[] { n }).toList(); - } - private static final Set upgradedNodes = new HashSet<>(); private static TestFeatureService oldClusterTestFeatureService = null; private static boolean upgradeFailed = false; private static IndexVersion oldIndexVersion; - private final 
int requestedUpgradedNodes; protected ParameterizedRollingUpgradeTestCase(@Name("upgradedNodes") int upgradedNodes) { this.requestedUpgradedNodes = upgradedNodes; } + @ParametersFactory(shuffle = false) + public static Iterable parameters() { + return IntStream.rangeClosed(0, NODE_NUM).boxed().map(n -> new Object[] { n }).toList(); + } + + protected abstract ElasticsearchCluster getUpgradeCluster(); + @Before public void extractOldClusterFeatures() { if (isOldCluster() && oldClusterTestFeatureService == null) { @@ -135,7 +106,7 @@ public void upgradeNode() throws Exception { if (upgradedNodes.add(n)) { try { logger.info("Upgrading node {} to version {}", n, Version.CURRENT); - cluster.upgradeNodeToVersion(n, Version.CURRENT); + getUpgradeCluster().upgradeNodeToVersion(n, Version.CURRENT); } catch (Exception e) { upgradeFailed = true; throw e; @@ -199,7 +170,7 @@ protected static boolean isUpgradedCluster() { @Override protected String getTestRestCluster() { - return cluster.getHttpAddresses(); + return getUpgradeCluster().getHttpAddresses(); } @Override diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java index ef80643c82c0d..593630546845d 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java @@ -42,7 +42,7 @@ import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.notNullValue; -public class SnapshotBasedRecoveryIT extends ParameterizedRollingUpgradeTestCase { +public class SnapshotBasedRecoveryIT extends AbstractRollingUpgradeTestCase { public SnapshotBasedRecoveryIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git 
a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SystemIndicesUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SystemIndicesUpgradeIT.java index fbd6ee8aa3759..a2e3b03c9036f 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SystemIndicesUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SystemIndicesUpgradeIT.java @@ -23,7 +23,7 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; -public class SystemIndicesUpgradeIT extends ParameterizedRollingUpgradeTestCase { +public class SystemIndicesUpgradeIT extends AbstractRollingUpgradeTestCase { public SystemIndicesUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java index 3ce0fc79087c2..2889885f83984 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java @@ -26,7 +26,7 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; -public class TsdbIT extends ParameterizedRollingUpgradeTestCase { +public class TsdbIT extends AbstractRollingUpgradeTestCase { public TsdbIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java index 3af344051030b..8dc3b43abf3e1 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java +++ 
b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java @@ -24,7 +24,7 @@ import static org.elasticsearch.rest.action.search.RestSearchAction.TOTAL_HITS_AS_INT_PARAM; import static org.hamcrest.Matchers.is; -public class UpgradeWithOldIndexSettingsIT extends ParameterizedRollingUpgradeTestCase { +public class UpgradeWithOldIndexSettingsIT extends AbstractRollingUpgradeTestCase { public UpgradeWithOldIndexSettingsIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java index e78e0978b1d80..21dbad9487d4e 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java @@ -22,7 +22,7 @@ import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.equalTo; -public class VectorSearchIT extends ParameterizedRollingUpgradeTestCase { +public class VectorSearchIT extends AbstractRollingUpgradeTestCase { public VectorSearchIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); } diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/XPackIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/XPackIT.java index dade5b53addae..6379a8875dfb4 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/XPackIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/XPackIT.java @@ -22,7 +22,7 @@ * Basic tests for simple xpack functionality that are only run if the * cluster is the on the default distribution. 
*/ -public class XPackIT extends ParameterizedRollingUpgradeTestCase { +public class XPackIT extends AbstractRollingUpgradeTestCase { public XPackIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/capabilities.json b/rest-api-spec/src/main/resources/rest-api-spec/api/capabilities.json new file mode 100644 index 0000000000000..28c341d9983cc --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/capabilities.json @@ -0,0 +1,47 @@ +{ + "capabilities": { + "documentation": { + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/capabilities.html", + "description": "Checks if the specified combination of method, API, parameters, and arbitrary capabilities are supported" + }, + "stability": "experimental", + "visibility": "private", + "headers": { + "accept": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_capabilities", + "methods": [ + "GET" + ] + } + ] + }, + "params": { + "method": { + "type": "enum", + "description": "REST method to check", + "options": [ + "GET", "HEAD", "POST", "PUT", "DELETE" + ], + "default": "GET" + }, + "path": { + "type": "string", + "description": "API path to check" + }, + "parameters": { + "type": "string", + "description": "Comma-separated list of API parameters to check" + }, + "capabilities": { + "type": "string", + "description": "Comma-separated list of arbitrary API capabilities to check" + } + } + } +} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/README.asciidoc b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/README.asciidoc index 0fcedece97f01..baec8169b4f76 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/README.asciidoc +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/README.asciidoc @@ -143,7 +143,7 @@ The `cluster_features` field can either be a string or an array of strings. 
[[synthetic_cluster_features]] Note: In order to smoothen the transition from version checks to cluster feature checks, a REST-test specific -synthetic cluster feature named `gte_v{VERSION}` is available for all release versions until including 8.14.0. +synthetic cluster feature named `gte_v{VERSION}` is available for all release versions up to 8.15.0. For instance, `gte_v8.12.2` would be available for all release versions greater than or equal to 8.12.2. [[skip_known_issues]] diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/capabilities/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/capabilities/10_basic.yml new file mode 100644 index 0000000000000..715e696bd1032 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/capabilities/10_basic.yml @@ -0,0 +1,28 @@ +--- +"Capabilities API": + + - requires: + capabilities: + - method: GET + path: /_capabilities + parameters: [method, path, parameters, capabilities] + capabilities: [] + reason: "capabilities api requires itself to be supported" + + - do: + capabilities: + method: GET + path: /_capabilities + parameters: method,path,parameters,capabilities + error_trace: false + + - match: { supported: true } + + - do: + capabilities: + method: GET + path: /_capabilities + parameters: unknown + error_trace: false + + - match: { supported: false } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml index 3ae8f8b09aa4a..ca1d22e4a1ce7 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml @@ -14,12 +14,26 @@ setup: "postings": "type": "text" "index_options": "offsets" + "nested": + "type": "nested" + "properties": + "text": + "type": "text" + "vectors": + 
"type": "dense_vector" + "dims": 2 + "index": true + "similarity": "l2_norm" + - do: index: index: test id: "1" body: "text" : "The quick brown fox is brown." + "nested": + "text": "The quick brown fox is brown." + "vectors": [1, 2] - do: indices.refresh: {} @@ -43,6 +57,7 @@ teardown: "query" : { "multi_match" : { "query" : "quick brown fox", "fields" : [ "text*"] } }, "highlight" : { "type" : "unified", "fields" : { "*" : {} } } } + - length: { hits.hits.0.highlight: 3 } - match: {hits.hits.0.highlight.text.0: "The quick brown fox is brown."} - match: {hits.hits.0.highlight.text\.fvh.0: "The quick brown fox is brown."} - match: {hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown."} @@ -58,6 +73,7 @@ teardown: "query" : { "combined_fields" : { "query" : "quick brown fox", "fields" : [ "text*"] } }, "highlight" : { "type" : "unified", "fields" : { "*" : {} } } } + - length: { hits.hits.0.highlight: 3 } - match: {hits.hits.0.highlight.text.0: "The quick brown fox is brown."} - match: {hits.hits.0.highlight.text\.fvh.0: "The quick brown fox is brown."} - match: {hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown."} @@ -72,11 +88,13 @@ teardown: search: body: { "query": { "multi_match": { "query": "quick brown fox", "type": "phrase", "fields": [ "text*" ] } }, - "highlight": { "type": "unified", "fields": { "*": { } } } } + "highlight": { "type": "unified", "fields": { "*": { } } } + } - - match: { hits.hits.0.highlight.text.0: "The quick brown fox is brown." } - - match: { hits.hits.0.highlight.text\.fvh.0: "The quick brown fox is brown." } - - match: { hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown." } + - length: { hits.hits.0.highlight: 3 } + - match: { hits.hits.0.highlight.text.0: "The quick brown fox is brown." } + - match: { hits.hits.0.highlight.text\.fvh.0: "The quick brown fox is brown." } + - match: { hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown." 
} - do: indices.put_settings: @@ -90,6 +108,7 @@ teardown: "query" : { "multi_match" : { "query" : "quick brown fox", "type": "phrase", "fields" : [ "text*"] } }, "highlight" : { "type" : "unified", "fields" : { "*" : {} } } } + - length: { hits.hits.0.highlight: 3 } - match: {hits.hits.0.highlight.text.0: "The quick brown fox is brown."} - match: {hits.hits.0.highlight.text\.fvh.0: "The quick brown fox is brown."} - match: {hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown."} @@ -100,43 +119,69 @@ teardown: reason: 'kNN was not correctly skipped until 8.12' - do: - indices.create: - index: test-highlighting-knn - body: - mappings: - "properties": - "vectors": - "type": "dense_vector" - "dims": 2 - "index": true - "similarity": "l2_norm" - "text": - "type": "text" - "fields": - "fvh": - "type": "text" - "term_vector": "with_positions_offsets" - "postings": - "type": "text" - "index_options": "offsets" - - do: - index: - index: test-highlighting-knn - id: "1" - body: - "text" : "The quick brown fox is brown." - "vectors": [1, 2] + search: + index: test + body: { + "query": { "multi_match": { "query": "quick brown fox", "type": "phrase", "fields": [ "text*" ] } }, + "highlight": { "type": "unified", "fields": { "text*": { } } }, + "knn": { "field": "vectors", "query_vector": [1, 2], "k": 10, "num_candidates": 10 } } + + - length: { hits.hits.0.highlight: 3 } + - match: { hits.hits.0.highlight.text.0: "The quick brown fox is brown." } + - match: { hits.hits.0.highlight.text\.fvh.0: "The quick brown fox is brown." } + - match: { hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown." 
} + +--- +"Test nested queries automatically disable weighted mode": + - requires: + cluster_features: "gte_v8.15.0" + reason: 'nested was not correctly skipped until 8.15' + - do: - indices.refresh: {} + search: + index: test + body: { + "query": { + "nested": { + "path": "nested", + "query": { + "multi_match": { + "query": "quick brown fox", + "type": "phrase", + "fields": [ "nested.text" ] + } + } + } + }, + "highlight": { "type": "unified", "fields": { "*": { } } } + } + + - length: { hits.hits.0.highlight: 1 } + - match: { hits.hits.0.highlight.nested\.text.0: "The quick brown fox is brown." } - do: search: - index: test-highlighting-knn + index: test body: { - "query": { "multi_match": { "query": "quick brown fox", "type": "phrase", "fields": [ "text*" ] } }, - "highlight": { "type": "unified", "fields": { "*": { } } }, - "knn": { "field": "vectors", "query_vector": [1, 2], "k": 10, "num_candidates": 10 } } + "query": { + "bool": { + "must_not": { + "nested": { + "path": "nested", + "query": { + "multi_match": { "query": "quick red fox", "type": "phrase", "fields": [ "nested.text" ] } + } + } + }, + "should": { + "multi_match": { "query": "quick brown fox", "type": "phrase", "fields": [ "text*" ] } + } + } + }, + "highlight": { "type": "unified", "fields": { "text*": { } } } + } + - length: { hits.hits.0.highlight: 3 } - match: { hits.hits.0.highlight.text.0: "The quick brown fox is brown." } - match: { hits.hits.0.highlight.text\.fvh.0: "The quick brown fox is brown." } - match: { hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown." 
} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/DanglingIndicesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/DanglingIndicesIT.java index c47ada432f4b1..0b9ca9d9f9586 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/DanglingIndicesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/DanglingIndicesIT.java @@ -175,6 +175,7 @@ public void testMustAcceptDataLossToImportDanglingIndex() throws Exception { * other will be considered dangling, and can therefore be listed and * deleted through the API */ + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/108288") public void testDanglingIndexCanBeDeleted() throws Exception { final Settings settings = buildSettings(1, true); internalCluster().startNodes(3, settings); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/nodescapabilities/SimpleNodesCapabilitiesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/nodescapabilities/SimpleNodesCapabilitiesIT.java new file mode 100644 index 0000000000000..9b60044c94f70 --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/nodescapabilities/SimpleNodesCapabilitiesIT.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.nodescapabilities; + +import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; +import org.elasticsearch.action.admin.cluster.node.capabilities.NodesCapabilitiesRequest; +import org.elasticsearch.action.admin.cluster.node.capabilities.NodesCapabilitiesResponse; +import org.elasticsearch.test.ESIntegTestCase; + +import java.io.IOException; + +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresentWith; +import static org.hamcrest.Matchers.hasSize; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) +public class SimpleNodesCapabilitiesIT extends ESIntegTestCase { + + public void testNodesCapabilities() throws IOException { + internalCluster().startNodes(2); + + ClusterHealthResponse clusterHealth = clusterAdmin().prepareHealth().setWaitForGreenStatus().setWaitForNodes("2").get(); + logger.info("--> done cluster_health, status {}", clusterHealth.getStatus()); + + // check we support the capabilities API itself. Which we do. 
+ NodesCapabilitiesResponse response = clusterAdmin().nodesCapabilities(new NodesCapabilitiesRequest().path("_capabilities")) + .actionGet(); + assertThat(response.getNodes(), hasSize(2)); + assertThat(response.isSupported(), isPresentWith(true)); + + // check we support some parameters of the capabilities API + response = clusterAdmin().nodesCapabilities(new NodesCapabilitiesRequest().path("_capabilities").parameters("method", "path")) + .actionGet(); + assertThat(response.getNodes(), hasSize(2)); + assertThat(response.isSupported(), isPresentWith(true)); + + // check we don't support some other parameters of the capabilities API + response = clusterAdmin().nodesCapabilities(new NodesCapabilitiesRequest().path("_capabilities").parameters("method", "invalid")) + .actionGet(); + assertThat(response.getNodes(), hasSize(2)); + assertThat(response.isSupported(), isPresentWith(false)); + + // check we don't support a random invalid api + // TODO this is not working yet - see https://github.com/elastic/elasticsearch/issues/107425 + /*response = clusterAdmin().nodesCapabilities(new NodesCapabilitiesRequest().path("_invalid")) + .actionGet(); + assertThat(response.getNodes(), hasSize(2)); + assertThat(response.isSupported(), isPresentWith(false));*/ + } +} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/readiness/ReadinessClusterIT.java b/server/src/internalClusterTest/java/org/elasticsearch/readiness/ReadinessClusterIT.java index 1f8d55516d508..b7a1dc12406d2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/readiness/ReadinessClusterIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/readiness/ReadinessClusterIT.java @@ -215,10 +215,9 @@ public Settings onNodeStopped(String nodeName) throws Exception { } } - private Tuple setupClusterStateListenerForError(String node) { + private CountDownLatch setupClusterStateListenerForError(String node) { ClusterService clusterService = internalCluster().clusterService(node); 
CountDownLatch savedClusterState = new CountDownLatch(1); - AtomicLong metadataVersion = new AtomicLong(-1); clusterService.addListener(new ClusterStateListener() { @Override public void clusterChanged(ClusterChangedEvent event) { @@ -231,13 +230,16 @@ public void clusterChanged(ClusterChangedEvent event) { containsString("Missing handler definition for content key [not_cluster_settings]") ); clusterService.removeListener(this); - metadataVersion.set(event.state().metadata().version()); savedClusterState.countDown(); } } }); - return new Tuple<>(savedClusterState, metadataVersion); + // we need this after we setup the listener above, in case the node started and processed + // settings before we set our listener to cluster state changes. + causeClusterStateUpdate(); + + return savedClusterState; } private void writeFileSettings(String json) throws Exception { @@ -269,22 +271,49 @@ public void testNotReadyOnBadFileSettings() throws Exception { assertMasterNode(internalCluster().nonMasterClient(), masterNode); var savedClusterState = setupClusterStateListenerForError(masterNode); - // we need this after we setup the listener above, in case the node started and processed - // settings before we set our listener to cluster state changes. 
- causeClusterStateUpdate(); - FileSettingsService masterFileSettingsService = internalCluster().getInstance(FileSettingsService.class, masterNode); assertTrue(masterFileSettingsService.watching()); assertFalse(dataFileSettingsService.watching()); - boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); + boolean awaitSuccessful = savedClusterState.await(20, TimeUnit.SECONDS); assertTrue(awaitSuccessful); ReadinessService s = internalCluster().getInstance(ReadinessService.class, internalCluster().getMasterName()); assertNull(s.boundAddress()); } + public void testReadyAfterRestartWithBadFileSettings() throws Exception { + internalCluster().setBootstrapMasterNodeIndex(0); + writeFileSettings(testJSON); + + logger.info("--> start data node / non master node"); + String dataNode = internalCluster().startNode(Settings.builder().put(dataOnlyNode()).put("discovery.initial_state_timeout", "1s")); + String masterNode = internalCluster().startMasterOnlyNode(); + + assertMasterNode(internalCluster().nonMasterClient(), masterNode); + assertBusy(() -> assertTrue("master node ready", internalCluster().getInstance(ReadinessService.class, masterNode).ready())); + assertBusy(() -> assertTrue("data node ready", internalCluster().getInstance(ReadinessService.class, dataNode).ready())); + + logger.info("--> stop master node"); + Settings masterDataPathSettings = internalCluster().dataPathSettings(internalCluster().getMasterName()); + internalCluster().stopCurrentMasterNode(); + expectMasterNotFound(); + + logger.info("--> write bad file settings before restarting master node"); + writeFileSettings(testErrorJSON); + + logger.info("--> restart master node"); + String nextMasterNode = internalCluster().startNode(Settings.builder().put(nonDataNode(masterNode())).put(masterDataPathSettings)); + + assertMasterNode(internalCluster().nonMasterClient(), nextMasterNode); + + var savedClusterState = setupClusterStateListenerForError(nextMasterNode); + 
assertTrue(savedClusterState.await(20, TimeUnit.SECONDS)); + + assertTrue("master node ready on restart", internalCluster().getInstance(ReadinessService.class, nextMasterNode).ready()); + } + public void testReadyWhenMissingFileSettings() throws Exception { internalCluster().setBootstrapMasterNodeIndex(0); internalCluster().startNode(Settings.builder().put(dataOnlyNode()).put("discovery.initial_state_timeout", "1s")); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/FileSettingsServiceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/FileSettingsServiceIT.java index 58dcfdaec5147..38bc372868df0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/FileSettingsServiceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/FileSettingsServiceIT.java @@ -243,7 +243,7 @@ public void testReservedStatePersistsOnRestart() throws Exception { FileSettingsService masterFileSettingsService = internalCluster().getInstance(FileSettingsService.class, masterNode); - assertTrue(masterFileSettingsService.watching()); + assertBusy(() -> assertTrue(masterFileSettingsService.watching())); logger.info("--> write some settings"); writeJSONFile(masterNode, testJSON); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java index c9a6cfaf754c6..71402d3e9c1d8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java @@ -22,6 +22,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.avg; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; +import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; @@ -83,9 +84,11 @@ public void setupSuiteScopeCluster() throws Exception { } indexRandom(true, builders); ensureSearchable(); + // Force merge to ensure segment consistency as any segment merging can change which particular documents + // are sampled + assertNoFailures(indicesAdmin().prepareForceMerge("idx").setMaxNumSegments(1).get()); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105839") public void testRandomSamplerConsistentSeed() { double[] sampleMonotonicValue = new double[1]; double[] sampleNumericValue = new double[1]; diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 475158c7a8709..e6b944262094d 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -65,6 +65,7 @@ exports org.elasticsearch.action.admin.cluster.desirednodes; exports org.elasticsearch.action.admin.cluster.health; exports org.elasticsearch.action.admin.cluster.migration; + exports org.elasticsearch.action.admin.cluster.node.capabilities; exports org.elasticsearch.action.admin.cluster.node.hotthreads; exports org.elasticsearch.action.admin.cluster.node.info; exports org.elasticsearch.action.admin.cluster.node.reload; diff --git a/server/src/main/java/org/elasticsearch/ElasticsearchException.java b/server/src/main/java/org/elasticsearch/ElasticsearchException.java index 31768ab85474d..2983a2d62de71 100644 --- a/server/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/server/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -1902,18 +1902,8 @@ private enum ElasticsearchExceptionHandle { 175, TransportVersions.V_8_12_0 ), - SEARCH_TIMEOUT_EXCEPTION( - SearchTimeoutException.class, - 
SearchTimeoutException::new, - 176, - TransportVersions.SEARCH_TIMEOUT_EXCEPTION_ADDED - ), - INGEST_GRAPH_STRUCTURE_EXCEPTION( - GraphStructureException.class, - GraphStructureException::new, - 177, - TransportVersions.INGEST_GRAPH_STRUCTURE_EXCEPTION - ), + SEARCH_TIMEOUT_EXCEPTION(SearchTimeoutException.class, SearchTimeoutException::new, 176, TransportVersions.V_8_13_0), + INGEST_GRAPH_STRUCTURE_EXCEPTION(GraphStructureException.class, GraphStructureException::new, 177, TransportVersions.V_8_13_0), FAILURE_INDEX_NOT_SUPPORTED_EXCEPTION( FailureIndexNotSupportedException.class, FailureIndexNotSupportedException::new, diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 3f66147181593..95961546f1e1f 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -97,43 +97,9 @@ static TransportVersion def(int id) { public static final TransportVersion V_8_10_X = def(8_500_061); public static final TransportVersion V_8_11_X = def(8_512_00_1); public static final TransportVersion V_8_12_0 = def(8_560_00_0); - public static final TransportVersion DATE_HISTOGRAM_SUPPORT_DOWNSAMPLED_TZ_8_12_PATCH = def(8_560_00_1); - public static final TransportVersion NODE_STATS_REQUEST_SIMPLIFIED = def(8_561_00_0); - public static final TransportVersion TEXT_EXPANSION_TOKEN_PRUNING_CONFIG_ADDED = def(8_562_00_0); - public static final TransportVersion ESQL_ASYNC_QUERY = def(8_563_00_0); - public static final TransportVersion ESQL_STATUS_INCLUDE_LUCENE_QUERIES = def(8_564_00_0); - public static final TransportVersion ESQL_CLUSTER_ALIAS = def(8_565_00_0); - public static final TransportVersion SNAPSHOTS_IN_PROGRESS_TRACKING_REMOVING_NODES_ADDED = def(8_566_00_0); - public static final TransportVersion SMALLER_RELOAD_SECURE_SETTINGS_REQUEST = def(8_567_00_0); - public static final TransportVersion 
UPDATE_API_KEY_EXPIRATION_TIME_ADDED = def(8_568_00_0); - public static final TransportVersion LAZY_ROLLOVER_ADDED = def(8_569_00_0); - public static final TransportVersion ESQL_PLAN_POINT_LITERAL_WKB = def(8_570_00_0); - public static final TransportVersion HOT_THREADS_AS_BYTES = def(8_571_00_0); - public static final TransportVersion ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED = def(8_572_00_0); - public static final TransportVersion ESQL_ENRICH_POLICY_CCQ_MODE = def(8_573_00_0); - public static final TransportVersion DATE_HISTOGRAM_SUPPORT_DOWNSAMPLED_TZ = def(8_574_00_0); - public static final TransportVersion PEERFINDER_REPORTS_PEERS_MASTERS = def(8_575_00_0); - public static final TransportVersion ESQL_MULTI_CLUSTERS_ENRICH = def(8_576_00_0); - public static final TransportVersion NESTED_KNN_MORE_INNER_HITS = def(8_577_00_0); - public static final TransportVersion REQUIRE_DATA_STREAM_ADDED = def(8_578_00_0); - public static final TransportVersion ML_INFERENCE_COHERE_EMBEDDINGS_ADDED = def(8_579_00_0); - public static final TransportVersion DESIRED_NODE_VERSION_OPTIONAL_STRING = def(8_580_00_0); - public static final TransportVersion ML_INFERENCE_REQUEST_INPUT_TYPE_UNSPECIFIED_ADDED = def(8_581_00_0); - public static final TransportVersion ASYNC_SEARCH_STATUS_SUPPORTS_KEEP_ALIVE = def(8_582_00_0); - public static final TransportVersion KNN_QUERY_NUMCANDS_AS_OPTIONAL_PARAM = def(8_583_00_0); - public static final TransportVersion TRANSFORM_GET_BASIC_STATS = def(8_584_00_0); - public static final TransportVersion NLP_DOCUMENT_CHUNKING_ADDED = def(8_585_00_0); - public static final TransportVersion SEARCH_TIMEOUT_EXCEPTION_ADDED = def(8_586_00_0); - public static final TransportVersion ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED = def(8_587_00_0); - public static final TransportVersion HEALTH_INFO_ENRICHED_WITH_REPOS = def(8_588_00_0); - public static final TransportVersion RESOLVE_CLUSTER_ENDPOINT_ADDED = def(8_589_00_0); - public static final TransportVersion 
FIELD_CAPS_FIELD_HAS_VALUE = def(8_590_00_0); - public static final TransportVersion ML_INFERENCE_REQUEST_INPUT_TYPE_CLASS_CLUSTER_ADDED = def(8_591_00_0); - public static final TransportVersion ML_DIMENSIONS_SET_BY_USER_ADDED = def(8_592_00_0); - public static final TransportVersion INDEX_REQUEST_NORMALIZED_BYTES_PARSED = def(8_593_00_0); - public static final TransportVersion INGEST_GRAPH_STRUCTURE_EXCEPTION = def(8_594_00_0); + public static final TransportVersion V_8_12_1 = def(8_560_00_1); public static final TransportVersion V_8_13_0 = def(8_595_00_0); - public static final TransportVersion SHUTDOWN_REQUEST_TIMEOUTS_FIX_8_13 = def(8_595_00_1); + public static final TransportVersion V_8_13_4 = def(8_595_00_1); // 8.14.0+ public static final TransportVersion RANDOM_AGG_SHARD_SEED = def(8_596_00_0); public static final TransportVersion ESQL_TIMINGS = def(8_597_00_0); @@ -193,6 +159,10 @@ static TransportVersion def(int id) { public static final TransportVersion NO_GLOBAL_RETENTION_FOR_SYSTEM_DATA_STREAMS = def(8_650_00_0); public static final TransportVersion SHUTDOWN_REQUEST_TIMEOUTS_FIX = def(8_651_00_0); public static final TransportVersion INDEXING_PRESSURE_REQUEST_REJECTIONS_COUNT = def(8_652_00_0); + public static final TransportVersion ROLLUP_USAGE = def(8_653_00_0); + public static final TransportVersion SECURITY_ROLE_DESCRIPTION = def(8_654_00_0); + public static final TransportVersion ML_INFERENCE_AZURE_OPENAI_COMPLETIONS = def(8_655_00_0); + public static final TransportVersion JOIN_STATUS_AGE_SERIALIZATION = def(8_656_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index a2e04d0bf3d48..dc161766b7954 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -174,6 +174,7 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_8_13_2 = new Version(8_13_02_99); public static final Version V_8_13_3 = new Version(8_13_03_99); public static final Version V_8_13_4 = new Version(8_13_04_99); + public static final Version V_8_13_5 = new Version(8_13_05_99); public static final Version V_8_14_0 = new Version(8_14_00_99); public static final Version V_8_15_0 = new Version(8_15_00_99); public static final Version CURRENT = V_8_15_0; diff --git a/server/src/main/java/org/elasticsearch/action/ActionListener.java b/server/src/main/java/org/elasticsearch/action/ActionListener.java index d07717857169b..21f3df2ab7175 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionListener.java +++ b/server/src/main/java/org/elasticsearch/action/ActionListener.java @@ -31,17 +31,94 @@ import static org.elasticsearch.action.ActionListenerImplementations.safeOnFailure; /** - * A listener for action responses or failures. + *

+ * Callbacks are used extensively throughout Elasticsearch because they enable us to write asynchronous and nonblocking code, i.e. code + * which doesn't necessarily compute a result straight away but also doesn't block the calling thread waiting for the result to become + * available. They support several useful control flows: + *

+ *
+ * <ul>
+ * <li>They can be completed immediately on the calling thread.</li>
+ * <li>They can be completed concurrently on a different thread.</li>
+ * <li>They can be stored in a data structure and completed later on when the system reaches a particular state.</li>
+ * <li>Most commonly, they can be passed on to other methods that themselves require a callback.</li>
+ * <li>They can be wrapped in another callback which modifies the behaviour of the original callback, perhaps adding some extra code to run + * before or after completion, before passing them on.</li>
+ * </ul>
+ *

+ * {@link ActionListener} is a general-purpose callback interface that is used extensively across the Elasticsearch codebase. {@link + * ActionListener} is used pretty much everywhere that needs to perform some asynchronous and nonblocking computation. The uniformity makes + * it easier to compose parts of the system together without needing to build adapters to convert back and forth between different kinds of + * callback. It also makes it easier to develop the skills needed to read and understand all the asynchronous code, although this definitely + * takes practice and is certainly not easy in an absolute sense. Finally, it has allowed us to build a rich library for working with {@link + * ActionListener} instances themselves, creating new instances out of existing ones and completing them in interesting ways. See for + * instance: + *

+ *
+ * <ul>
+ * <li>All the static methods on {@link ActionListener} itself.</li>
+ * <li>{@link org.elasticsearch.action.support.ThreadedActionListener} for forking work elsewhere.</li>
+ * <li>{@link org.elasticsearch.action.support.RefCountingListener} for running work in parallel.</li>
+ * <li>{@link org.elasticsearch.action.support.SubscribableListener} for constructing flexible workflows.</li>
+ * </ul>
+ *

+ * Callback-based asynchronous code can easily call regular synchronous code, but synchronous code cannot run callback-based asynchronous + * code without blocking the calling thread until the callback is called back. This blocking is at best undesirable (threads are too + * expensive to waste with unnecessary blocking) and at worst outright broken (the blocking can lead to deadlock). Unfortunately this means + * that most of our code ends up having to be written with callbacks, simply because it's ultimately calling into some other code that takes + * a callback. The entry points for all Elasticsearch APIs are callback-based (e.g. REST APIs all start at {@link + * org.elasticsearch.rest.BaseRestHandler}{@code #prepareRequest} and transport APIs all start at {@link + * org.elasticsearch.action.support.TransportAction}{@code #doExecute}) and the whole system fundamentally works in terms of an event loop + * (an {@code io.netty.channel.EventLoop}) which processes network events via callbacks. + *

+ *

+ * {@link ActionListener} is not an ad-hoc invention. Formally speaking, it is our implementation of the general concept of a + * continuation in the sense of continuation-passing style + * (CPS): an extra argument to a function which defines how to continue the computation when the result is available. This is in contrast to + * direct style which is the more usual style of calling methods that return values directly back to the caller so they can continue + * executing as normal. There's essentially two ways that computation can continue in Java (it can return a value or it can throw an + * exception) which is why {@link ActionListener} has both an {@link #onResponse} and an {@link #onFailure} method. + *

+ *

+ * CPS is strictly more expressive than direct style: direct code can be mechanically translated into continuation-passing style, but CPS + * also enables all sorts of other useful control structures such as forking work onto separate threads, possibly to be executed in + * parallel, perhaps even across multiple nodes, or possibly collecting a list of continuations all waiting for the same condition to be + * satisfied before proceeding (e.g. {@link org.elasticsearch.action.support.SubscribableListener} amongst many others). Some languages have + * first-class support for continuations (e.g. the {@code async} and {@code await} primitives in C#) allowing the programmer to write code + * in direct style away from those exotic control structures, but Java does not. That's why we have to manipulate all the callbacks + * ourselves. + *

+ *

+ * Strictly speaking, CPS requires that a computation only continues by calling the continuation. In Elasticsearch, this means that + * asynchronous methods must have {@code void} return type and may not throw any exceptions. This is mostly the case in our code as written + * today, and is a good guiding principle, but we don't enforce void exceptionless methods and there are some deviations from this rule. In + * particular, it's not uncommon to permit some methods to throw an exception, using things like {@link ActionListener#run} (or an + * equivalent {@code try ... catch ...} block) further up the stack to handle it. Some methods also take (and may complete) an {@link + * ActionListener} parameter, but still return a value separately for other local synchronous work. + *

+ *

+ * This pattern is often used in the transport action layer with the use of the {@link + * org.elasticsearch.action.support.ChannelActionListener} class, which wraps a {@link org.elasticsearch.transport.TransportChannel} + * produced by the transport layer. {@link org.elasticsearch.transport.TransportChannel} implementations can hold a reference to a Netty + * channel with which to pass the response back to the network caller. Netty has a many-to-one association of network callers to channels, + * so a call taking a long time generally won't hog resources: it's cheap. A transport action can take hours to respond and that's alright, + * barring caller timeouts. + *

+ *

+ * Note that we explicitly avoid {@link java.util.concurrent.CompletableFuture} and other similar mechanisms as much as possible. They + * can achieve the same goals as {@link ActionListener}, but can also easily be misused in various ways that lead to severe bugs. In + * particular, futures support blocking while waiting for a result, but this is almost never appropriate in Elasticsearch's production code + * where threads are such a precious resource. Moreover if something throws an {@link Error} then the JVM should exit pretty much straight + * away, but {@link java.util.concurrent.CompletableFuture} can catch an {@link Error} which delays the JVM exit until its result is + * observed. This may be much later, or possibly even never. It's not possible to introduce such bugs when using {@link ActionListener}. + *

*/ public interface ActionListener { /** - * Handle action response. This response may constitute a failure or a - * success but it is up to the listener to make that decision. + * Complete this listener with a successful (or at least, non-exceptional) response. */ void onResponse(Response response); /** - * A failure caused by an exception at some phase of the task. + * Complete this listener with an exceptional response. */ void onFailure(Exception e); diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index ef73d0470b43e..ab93f98c5648b 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -29,6 +29,7 @@ import org.elasticsearch.action.admin.cluster.migration.PostFeatureUpgradeAction; import org.elasticsearch.action.admin.cluster.migration.TransportGetFeatureUpgradeStatusAction; import org.elasticsearch.action.admin.cluster.migration.TransportPostFeatureUpgradeAction; +import org.elasticsearch.action.admin.cluster.node.capabilities.TransportNodesCapabilitiesAction; import org.elasticsearch.action.admin.cluster.node.hotthreads.TransportNodesHotThreadsAction; import org.elasticsearch.action.admin.cluster.node.info.TransportNodesInfoAction; import org.elasticsearch.action.admin.cluster.node.reload.TransportNodesReloadSecureSettingsAction; @@ -284,6 +285,7 @@ import org.elasticsearch.rest.action.admin.cluster.RestGetStoredScriptAction; import org.elasticsearch.rest.action.admin.cluster.RestGetTaskAction; import org.elasticsearch.rest.action.admin.cluster.RestListTasksAction; +import org.elasticsearch.rest.action.admin.cluster.RestNodesCapabilitiesAction; import org.elasticsearch.rest.action.admin.cluster.RestNodesHotThreadsAction; import org.elasticsearch.rest.action.admin.cluster.RestNodesInfoAction; import org.elasticsearch.rest.action.admin.cluster.RestNodesStatsAction; @@ -616,6 
+618,7 @@ public void reg actions.register(TransportNodesInfoAction.TYPE, TransportNodesInfoAction.class); actions.register(TransportRemoteInfoAction.TYPE, TransportRemoteInfoAction.class); + actions.register(TransportNodesCapabilitiesAction.TYPE, TransportNodesCapabilitiesAction.class); actions.register(RemoteClusterNodesAction.TYPE, RemoteClusterNodesAction.TransportAction.class); actions.register(TransportNodesStatsAction.TYPE, TransportNodesStatsAction.class); actions.register(TransportNodesUsageAction.TYPE, TransportNodesUsageAction.class); @@ -833,6 +836,7 @@ public void initRestHandlers(Supplier nodesInCluster, Predicate< registerHandler.accept(new RestClearVotingConfigExclusionsAction()); registerHandler.accept(new RestNodesInfoAction(settingsFilter)); registerHandler.accept(new RestRemoteClusterInfoAction()); + registerHandler.accept(new RestNodesCapabilitiesAction()); registerHandler.accept(new RestNodesStatsAction()); registerHandler.accept(new RestNodesUsageAction()); registerHandler.accept(new RestNodesHotThreadsAction()); @@ -1029,6 +1033,7 @@ public void initRestHandlers(Supplier nodesInCluster, Predicate< @Override protected void configure() { + bind(RestController.class).toInstance(restController); bind(ActionFilters.class).toInstance(actionFilters); bind(DestructiveOperations.class).toInstance(destructiveOperations); bind(new TypeLiteral>() { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodeCapability.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodeCapability.java new file mode 100644 index 0000000000000..c26aa673d13fd --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodeCapability.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.action.admin.cluster.node.capabilities; + +import org.elasticsearch.action.support.nodes.BaseNodeResponse; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; + +public class NodeCapability extends BaseNodeResponse { + + private final boolean supported; + + public NodeCapability(StreamInput in) throws IOException { + super(in); + + supported = in.readBoolean(); + } + + public NodeCapability(boolean supported, DiscoveryNode node) { + super(node); + this.supported = supported; + } + + public boolean isSupported() { + return supported; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + + out.writeBoolean(supported); + } +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesRequest.java new file mode 100644 index 0000000000000..c69d273727238 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesRequest.java @@ -0,0 +1,75 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.action.admin.cluster.node.capabilities; + +import org.elasticsearch.action.support.nodes.BaseNodesRequest; +import org.elasticsearch.common.Strings; +import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.rest.RestRequest; + +import java.util.Set; + +public class NodesCapabilitiesRequest extends BaseNodesRequest { + + private RestRequest.Method method = RestRequest.Method.GET; + private String path = "/"; + private Set parameters = Set.of(); + private Set capabilities = Set.of(); + private RestApiVersion restApiVersion = RestApiVersion.current(); + + public NodesCapabilitiesRequest() { + // always send to all nodes + super(Strings.EMPTY_ARRAY); + } + + public NodesCapabilitiesRequest path(String path) { + this.path = path; + return this; + } + + public String path() { + return path; + } + + public NodesCapabilitiesRequest method(RestRequest.Method method) { + this.method = method; + return this; + } + + public RestRequest.Method method() { + return method; + } + + public NodesCapabilitiesRequest parameters(String... parameters) { + this.parameters = Set.of(parameters); + return this; + } + + public Set parameters() { + return parameters; + } + + public NodesCapabilitiesRequest capabilities(String... 
capabilities) { + this.capabilities = Set.of(capabilities); + return this; + } + + public Set capabilities() { + return capabilities; + } + + public NodesCapabilitiesRequest restApiVersion(RestApiVersion restApiVersion) { + this.restApiVersion = restApiVersion; + return this; + } + + public RestApiVersion restApiVersion() { + return restApiVersion; + } +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesResponse.java new file mode 100644 index 0000000000000..3527b8cc46840 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesResponse.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.action.admin.cluster.node.capabilities; + +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.action.support.nodes.BaseNodesResponse; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xcontent.ToXContentFragment; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.List; +import java.util.Optional; + +public class NodesCapabilitiesResponse extends BaseNodesResponse implements ToXContentFragment { + protected NodesCapabilitiesResponse(ClusterName clusterName, List nodes, List failures) { + super(clusterName, nodes, failures); + } + + @Override + protected List readNodesFrom(StreamInput in) throws IOException { + return TransportAction.localOnly(); + } + + @Override + protected void writeNodesTo(StreamOutput out, List nodes) throws IOException { + TransportAction.localOnly(); + } + + public Optional isSupported() { + if (hasFailures() || getNodes().isEmpty()) { + // there's no nodes in the response (uh? what about ourselves?) + // or there's a problem (hopefully transient) talking to one or more nodes. 
+ // We don't have enough information to decide if it's supported or not, so return unknown + return Optional.empty(); + } + + return Optional.of(getNodes().stream().allMatch(NodeCapability::isSupported)); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + Optional supported = isSupported(); + return builder.field("supported", supported.orElse(null)); + } +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/TransportNodesCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/TransportNodesCapabilitiesAction.java new file mode 100644 index 0000000000000..71aa95908d3b7 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/TransportNodesCapabilitiesAction.java @@ -0,0 +1,163 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.action.admin.cluster.node.capabilities; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.nodes.TransportNodesAction; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.features.FeatureService; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.admin.cluster.RestNodesCapabilitiesAction; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportService; + +import java.io.IOException; +import java.util.List; +import java.util.Optional; +import java.util.Set; + +public class TransportNodesCapabilitiesAction extends TransportNodesAction< + NodesCapabilitiesRequest, + NodesCapabilitiesResponse, + TransportNodesCapabilitiesAction.NodeCapabilitiesRequest, + NodeCapability> { + + public static final ActionType TYPE = new ActionType<>("cluster:monitor/nodes/capabilities"); + + private final RestController restController; + private final FeatureService featureService; + + @Inject + public TransportNodesCapabilitiesAction( + ThreadPool threadPool, + ClusterService clusterService, + TransportService transportService, + ActionFilters actionFilters, + RestController restController, + FeatureService featureService + ) { + super( + TYPE.name(), + clusterService, + transportService, + actionFilters, + NodeCapabilitiesRequest::new, + 
threadPool.executor(ThreadPool.Names.MANAGEMENT) + ); + this.restController = restController; + this.featureService = featureService; + } + + @Override + protected void doExecute(Task task, NodesCapabilitiesRequest request, ActionListener listener) { + if (featureService.clusterHasFeature(clusterService.state(), RestNodesCapabilitiesAction.CAPABILITIES_ACTION) == false) { + // not everything in the cluster supports capabilities. + // Therefore we don't support whatever it is we're being asked for + listener.onResponse(new NodesCapabilitiesResponse(clusterService.getClusterName(), List.of(), List.of()) { + @Override + public Optional isSupported() { + return Optional.of(false); + } + }); + } else { + super.doExecute(task, request, listener); + } + } + + @Override + protected NodesCapabilitiesResponse newResponse( + NodesCapabilitiesRequest request, + List responses, + List failures + ) { + return new NodesCapabilitiesResponse(clusterService.getClusterName(), responses, failures); + } + + @Override + protected NodeCapabilitiesRequest newNodeRequest(NodesCapabilitiesRequest request) { + return new NodeCapabilitiesRequest( + request.method(), + request.path(), + request.parameters(), + request.capabilities(), + request.restApiVersion() + ); + } + + @Override + protected NodeCapability newNodeResponse(StreamInput in, DiscoveryNode node) throws IOException { + return new NodeCapability(in); + } + + @Override + protected NodeCapability nodeOperation(NodeCapabilitiesRequest request, Task task) { + boolean supported = restController.checkSupported( + request.method, + request.path, + request.parameters, + request.capabilities, + request.restApiVersion + ); + return new NodeCapability(supported, transportService.getLocalNode()); + } + + public static class NodeCapabilitiesRequest extends TransportRequest { + private final RestRequest.Method method; + private final String path; + private final Set parameters; + private final Set capabilities; + private final RestApiVersion 
restApiVersion; + + public NodeCapabilitiesRequest(StreamInput in) throws IOException { + super(in); + + method = in.readEnum(RestRequest.Method.class); + path = in.readString(); + parameters = in.readCollectionAsImmutableSet(StreamInput::readString); + capabilities = in.readCollectionAsImmutableSet(StreamInput::readString); + restApiVersion = RestApiVersion.forMajor(in.readVInt()); + } + + public NodeCapabilitiesRequest( + RestRequest.Method method, + String path, + Set parameters, + Set capabilities, + RestApiVersion restApiVersion + ) { + this.method = method; + this.path = path; + this.parameters = Set.copyOf(parameters); + this.capabilities = Set.copyOf(capabilities); + this.restApiVersion = restApiVersion; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + + out.writeEnum(method); + out.writeString(path); + out.writeCollection(parameters, StreamOutput::writeString); + out.writeCollection(capabilities, StreamOutput::writeString); + out.writeVInt(restApiVersion.major); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodeHotThreads.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodeHotThreads.java index 1118a6318ddf7..ef5d7f5e74ef8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodeHotThreads.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodeHotThreads.java @@ -27,7 +27,7 @@ public class NodeHotThreads extends BaseNodeResponse { NodeHotThreads(StreamInput in) throws IOException { super(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.HOT_THREADS_AS_BYTES)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { bytes = in.readReleasableBytesReference(); } else { bytes = ReleasableBytesReference.wrap(new BytesArray(in.readString().getBytes(StandardCharsets.UTF_8))); @@ -56,7 +56,7 @@ public java.io.Reader 
getHotThreadsReader() { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.HOT_THREADS_AS_BYTES)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeBytesReference(bytes); } else { out.writeString(bytes.utf8ToString()); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequest.java index c24833dca49ee..a83a09af642fa 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequest.java @@ -97,7 +97,7 @@ public static class NodeRequest extends TransportRequest { NodeRequest(StreamInput in) throws IOException { super(in); - if (in.getTransportVersion().before(TransportVersions.SMALLER_RELOAD_SECURE_SETTINGS_REQUEST)) { + if (in.getTransportVersion().before(TransportVersions.V_8_13_0)) { TaskId.readFromStream(in); in.readStringArray(); in.readOptionalArray(DiscoveryNode::new, DiscoveryNode[]::new); @@ -131,7 +131,7 @@ public void writeTo(StreamOutput out) throws IOException { assert hasReferences(); super.writeTo(out); - if (out.getTransportVersion().before(TransportVersions.SMALLER_RELOAD_SECURE_SETTINGS_REQUEST)) { + if (out.getTransportVersion().before(TransportVersions.V_8_13_0)) { TaskId.EMPTY_TASK_ID.writeTo(out); out.writeStringArray(Strings.EMPTY_ARRAY); out.writeOptionalArray(StreamOutput::writeWriteable, null); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java index 9c7cbc0ec1937..4829f309e1f30 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java @@ -8,6 +8,7 @@ package org.elasticsearch.action.admin.cluster.node.stats; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.FailedNodeException; @@ -38,8 +39,6 @@ import java.util.Map; import java.util.Set; -import static org.elasticsearch.TransportVersions.NODE_STATS_REQUEST_SIMPLIFIED; - public class TransportNodesStatsAction extends TransportNodesAction< NodesStatsRequest, NodesStatsResponse, @@ -158,7 +157,7 @@ public static class NodeStatsRequest extends TransportRequest { public NodeStatsRequest(StreamInput in) throws IOException { super(in); - if (in.getTransportVersion().onOrAfter(NODE_STATS_REQUEST_SIMPLIFIED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { this.nodesStatsRequestParameters = new NodesStatsRequestParameters(in); this.nodesIds = in.readStringArray(); } else { @@ -191,7 +190,7 @@ public String getDescription() { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - if (out.getTransportVersion().onOrAfter(NODE_STATS_REQUEST_SIMPLIFIED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { this.nodesStatsRequestParameters.writeTo(out); out.writeStringArrayNullable(nodesIds); } else { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java index 2ec6db339b6ef..21f187f052580 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java @@ -104,7 +104,7 @@ public 
CreateIndexRequest(StreamInput in) throws IOException { if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_12_0)) { origin = in.readString(); } - if (in.getTransportVersion().onOrAfter(TransportVersions.REQUIRE_DATA_STREAM_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { requireDataStream = in.readBoolean(); } else { requireDataStream = false; @@ -487,7 +487,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_12_0)) { out.writeString(origin); } - if (out.getTransportVersion().onOrAfter(TransportVersions.REQUIRE_DATA_STREAM_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeOptionalBoolean(this.requireDataStream); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionRequest.java index 1649e4587d63c..118f139045971 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionRequest.java @@ -62,10 +62,10 @@ public ResolveClusterActionRequest(String[] names, IndicesOptions indicesOptions public ResolveClusterActionRequest(StreamInput in) throws IOException { super(in); - if (in.getTransportVersion().before(TransportVersions.RESOLVE_CLUSTER_ENDPOINT_ADDED)) { + if (in.getTransportVersion().before(TransportVersions.V_8_13_0)) { throw new UnsupportedOperationException( - "ResolveClusterAction requires at least Transport Version " - + TransportVersions.RESOLVE_CLUSTER_ENDPOINT_ADDED.toReleaseVersion() + "ResolveClusterAction requires at least version " + + TransportVersions.V_8_13_0.toReleaseVersion() + " but was " + in.getTransportVersion().toReleaseVersion() ); @@ -78,10 +78,10 @@ public 
ResolveClusterActionRequest(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - if (out.getTransportVersion().before(TransportVersions.RESOLVE_CLUSTER_ENDPOINT_ADDED)) { + if (out.getTransportVersion().before(TransportVersions.V_8_13_0)) { throw new UnsupportedOperationException( - "ResolveClusterAction requires at least Transport Version " - + TransportVersions.RESOLVE_CLUSTER_ENDPOINT_ADDED.toReleaseVersion() + "ResolveClusterAction requires at least version " + + TransportVersions.V_8_13_0.toReleaseVersion() + " but was " + out.getTransportVersion().toReleaseVersion() ); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionResponse.java index ee2e3d60dc56e..892b34df2b863 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionResponse.java @@ -44,10 +44,10 @@ public ResolveClusterActionResponse(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().before(TransportVersions.RESOLVE_CLUSTER_ENDPOINT_ADDED)) { + if (out.getTransportVersion().before(TransportVersions.V_8_13_0)) { throw new UnsupportedOperationException( - "ResolveClusterAction requires at least Transport Version " - + TransportVersions.RESOLVE_CLUSTER_ENDPOINT_ADDED.toReleaseVersion() + "ResolveClusterAction requires at least version " + + TransportVersions.V_8_13_0.toReleaseVersion() + " but was " + out.getTransportVersion().toReleaseVersion() ); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterInfo.java b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterInfo.java 
index 578b4ae547a06..dc2416a1b1baa 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterInfo.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterInfo.java @@ -65,10 +65,10 @@ public ResolveClusterInfo(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().before(TransportVersions.RESOLVE_CLUSTER_ENDPOINT_ADDED)) { + if (out.getTransportVersion().before(TransportVersions.V_8_13_0)) { throw new UnsupportedOperationException( - "ResolveClusterAction requires at least Transport Version " - + TransportVersions.RESOLVE_CLUSTER_ENDPOINT_ADDED.toReleaseVersion() + "ResolveClusterAction requires at least version " + + TransportVersions.V_8_13_0.toReleaseVersion() + " but was " + out.getTransportVersion().toReleaseVersion() ); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java index 28ef2f644af04..1f582f95aea91 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java @@ -110,7 +110,7 @@ public RolloverRequest(StreamInput in) throws IOException { dryRun = in.readBoolean(); conditions = new RolloverConditions(in); createIndexRequest = new CreateIndexRequest(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.LAZY_ROLLOVER_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { lazy = in.readBoolean(); } else { lazy = false; @@ -165,7 +165,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(dryRun); conditions.writeTo(out); createIndexRequest.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.LAZY_ROLLOVER_ADDED)) { + if 
(out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeBoolean(lazy); } if (out.getTransportVersion().onOrAfter(TransportVersions.FAILURE_STORE_ROLLOVER)) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverResponse.java index 360ea59e6a299..04b9f6498a3a9 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverResponse.java @@ -8,6 +8,7 @@ package org.elasticsearch.action.admin.indices.rollover; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.support.master.ShardsAcknowledgedResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -20,8 +21,6 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.TransportVersions.LAZY_ROLLOVER_ADDED; - /** * Response object for {@link RolloverRequest} API * @@ -59,7 +58,7 @@ public final class RolloverResponse extends ShardsAcknowledgedResponse implement dryRun = in.readBoolean(); rolledOver = in.readBoolean(); shardsAcknowledged = in.readBoolean(); - if (in.getTransportVersion().onOrAfter(LAZY_ROLLOVER_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { lazy = in.readBoolean(); } else { lazy = false; @@ -142,7 +141,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(dryRun); out.writeBoolean(rolledOver); out.writeBoolean(shardsAcknowledged); - if (out.getTransportVersion().onOrAfter(LAZY_ROLLOVER_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeBoolean(lazy); } } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java index 
d555a41e6c4fc..a53911e5232a5 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java @@ -302,7 +302,6 @@ private void executeBulkRequestsByShard( bulkRequest.getRefreshPolicy(), requests.toArray(new BulkItemRequest[0]) ); - bulkShardRequest.waitForActiveShards(bulkRequest.waitForActiveShards()); bulkShardRequest.timeout(bulkRequest.timeout()); bulkShardRequest.routedBasedOnClusterVersion(clusterState.version()); diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java index bd929b9a2204e..85b7fc03ff667 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java @@ -15,6 +15,7 @@ import org.elasticsearch.action.support.replication.ReplicatedWriteRequest; import org.elasticsearch.action.support.replication.ReplicationRequest; import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.cluster.metadata.InferenceFieldMetadata; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.set.Sets; @@ -22,6 +23,7 @@ import org.elasticsearch.transport.RawIndexingDataTransportRequest; import java.io.IOException; +import java.util.Map; import java.util.Set; public final class BulkShardRequest extends ReplicatedWriteRequest @@ -33,6 +35,8 @@ public final class BulkShardRequest extends ReplicatedWriteRequest inferenceFieldMap = null; + public BulkShardRequest(StreamInput in) throws IOException { super(in); items = in.readArray(i -> i.readOptionalWriteable(inpt -> new BulkItemRequest(shardId, inpt)), BulkItemRequest[]::new); @@ -44,6 +48,30 @@ public BulkShardRequest(ShardId shardId, RefreshPolicy refreshPolicy, BulkItemRe setRefreshPolicy(refreshPolicy); } + /** + 
* Public for test + * Set the transient metadata indicating that this request requires running inference before proceeding. + */ + public void setInferenceFieldMap(Map fieldInferenceMap) { + this.inferenceFieldMap = fieldInferenceMap; + } + + /** + * Consumes the inference metadata to execute inference on the bulk items just once. + */ + public Map consumeInferenceFieldMap() { + Map ret = inferenceFieldMap; + inferenceFieldMap = null; + return ret; + } + + /** + * Public for test + */ + public Map getInferenceFieldMap() { + return inferenceFieldMap; + } + public long totalSizeInBytes() { long totalSizeInBytes = 0; for (int i = 0; i < items.length; i++) { @@ -85,6 +113,10 @@ public String[] indices() { @Override public void writeTo(StreamOutput out) throws IOException { + if (inferenceFieldMap != null) { + // Inferencing metadata should have been consumed as part of the ShardBulkInferenceActionFilter processing + throw new IllegalStateException("Inference metadata should have been consumed before writing to the stream"); + } super.writeTo(out); out.writeArray((o, item) -> { if (item != null) { diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeRequest.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeRequest.java index da56e20f4e6a4..6c1734bde401f 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeRequest.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeRequest.java @@ -56,7 +56,7 @@ class FieldCapabilitiesNodeRequest extends ActionRequest implements IndicesReque indexFilter = in.readOptionalNamedWriteable(QueryBuilder.class); nowInMillis = in.readLong(); runtimeFields = in.readGenericMap(); - if (in.getTransportVersion().onOrAfter(TransportVersions.FIELD_CAPS_FIELD_HAS_VALUE)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { includeEmptyFields = in.readBoolean(); } else { includeEmptyFields = 
true; @@ -144,7 +144,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalNamedWriteable(indexFilter); out.writeLong(nowInMillis); out.writeGenericMap(runtimeFields); - if (out.getTransportVersion().onOrAfter(TransportVersions.FIELD_CAPS_FIELD_HAS_VALUE)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeBoolean(includeEmptyFields); } } diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java index 4b1c256bdeb71..6fab92219511d 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java @@ -63,7 +63,7 @@ public FieldCapabilitiesRequest(StreamInput in) throws IOException { filters = in.readStringArray(); types = in.readStringArray(); } - if (in.getTransportVersion().onOrAfter(TransportVersions.FIELD_CAPS_FIELD_HAS_VALUE)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { includeEmptyFields = in.readBoolean(); } } @@ -104,7 +104,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeStringArray(filters); out.writeStringArray(types); } - if (out.getTransportVersion().onOrAfter(TransportVersions.FIELD_CAPS_FIELD_HAS_VALUE)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeBoolean(includeEmptyFields); } } diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java index 9d0eeb20dacef..afe918c6853e6 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -52,7 +52,6 @@ import java.util.Map; import java.util.Objects; -import static 
org.elasticsearch.TransportVersions.INDEX_REQUEST_NORMALIZED_BYTES_PARSED; import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_PRIMARY_TERM; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; @@ -189,7 +188,7 @@ public IndexRequest(@Nullable ShardId shardId, StreamInput in) throws IOExceptio dynamicTemplates = in.readMap(StreamInput::readString); } if (in.getTransportVersion().onOrAfter(PIPELINES_HAVE_RUN_FIELD_ADDED) - && in.getTransportVersion().before(INDEX_REQUEST_NORMALIZED_BYTES_PARSED)) { + && in.getTransportVersion().before(TransportVersions.V_8_13_0)) { in.readBoolean(); } if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { @@ -201,14 +200,12 @@ public IndexRequest(@Nullable ShardId shardId, StreamInput in) throws IOExceptio : new ArrayList<>(possiblyImmutableExecutedPipelines); } } - if (in.getTransportVersion().onOrAfter(TransportVersions.REQUIRE_DATA_STREAM_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { requireDataStream = in.readBoolean(); + normalisedBytesParsed = in.readZLong(); } else { requireDataStream = false; } - if (in.getTransportVersion().onOrAfter(INDEX_REQUEST_NORMALIZED_BYTES_PARSED)) { - normalisedBytesParsed = in.readZLong(); - } } public IndexRequest() { @@ -772,7 +769,7 @@ private void writeBody(StreamOutput out) throws IOException { } } if (out.getTransportVersion().onOrAfter(PIPELINES_HAVE_RUN_FIELD_ADDED) - && out.getTransportVersion().before(INDEX_REQUEST_NORMALIZED_BYTES_PARSED)) { + && out.getTransportVersion().before(TransportVersions.V_8_13_0)) { out.writeBoolean(normalisedBytesParsed != -1L); } if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { @@ -782,10 +779,8 @@ private void writeBody(StreamOutput out) throws IOException { } } - if (out.getTransportVersion().onOrAfter(TransportVersions.REQUIRE_DATA_STREAM_ADDED)) { + if 
(out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeBoolean(requireDataStream); - } - if (out.getTransportVersion().onOrAfter(INDEX_REQUEST_NORMALIZED_BYTES_PARSED)) { out.writeZLong(normalisedBytesParsed); } } diff --git a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java index 2f307d653f8a4..0db9f3d20d117 100644 --- a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.index.shard.ShardId; @@ -374,7 +375,17 @@ protected abstract void executePhaseOnShard( protected void fork(final Runnable runnable) { executor.execute(new AbstractRunnable() { @Override - public void onFailure(Exception e) {} + public void onFailure(Exception e) { + logger.error(() -> "unexpected error during [" + task + "]", e); + assert false : e; + } + + @Override + public void onRejection(Exception e) { + // avoid leaks during node shutdown by executing on the current thread if the executor shuts down + assert e instanceof EsRejectedExecutionException esre && esre.isExecutorShutdown() : e; + doRun(); + } @Override protected void doRun() { diff --git a/server/src/main/java/org/elasticsearch/action/search/RankFeaturePhase.java b/server/src/main/java/org/elasticsearch/action/search/RankFeaturePhase.java index a18d2c6418542..767597625edc6 100644 --- a/server/src/main/java/org/elasticsearch/action/search/RankFeaturePhase.java +++ 
b/server/src/main/java/org/elasticsearch/action/search/RankFeaturePhase.java @@ -22,7 +22,6 @@ public final class RankFeaturePhase extends SearchPhase { private final SearchPhaseContext context; private final SearchPhaseResults queryPhaseResults; - private final SearchPhaseResults rankPhaseResults; private final AggregatedDfs aggregatedDfs; @@ -39,8 +38,6 @@ public final class RankFeaturePhase extends SearchPhase { this.context = context; this.queryPhaseResults = queryPhaseResults; this.aggregatedDfs = aggregatedDfs; - this.rankPhaseResults = new ArraySearchPhaseResults<>(context.getNumShards()); - context.addReleasable(rankPhaseResults); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 51a8c6ddb3d76..a12d149bbe342 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -1303,8 +1303,8 @@ public SearchPhase newSearchPhase( task, true, searchService.getCoordinatorRewriteContextProvider(timeProvider::absoluteStartMillis), - listener.delegateFailureAndWrap((l, iters) -> { - SearchPhase action = newSearchPhase( + listener.delegateFailureAndWrap( + (l, iters) -> newSearchPhase( task, searchRequest, executor, @@ -1317,30 +1317,32 @@ public SearchPhase newSearchPhase( false, threadPool, clusters - ); - action.start(); - }) - ); - } else { - // for synchronous CCS minimize_roundtrips=false, use the CCSSingleCoordinatorSearchProgressListener - // (AsyncSearchTask will not return SearchProgressListener.NOOP, since it uses its own progress listener - // which delegates to CCSSingleCoordinatorSearchProgressListener when minimizing roundtrips) - if (clusters.isCcsMinimizeRoundtrips() == false - && clusters.hasRemoteClusters() - && task.getProgressListener() == SearchProgressListener.NOOP) { - 
task.setProgressListener(new CCSSingleCoordinatorSearchProgressListener()); - } - final SearchPhaseResults queryResultConsumer = searchPhaseController.newSearchPhaseResults( - executor, - circuitBreaker, - task::isCancelled, - task.getProgressListener(), - searchRequest, - shardIterators.size(), - exc -> searchTransportService.cancelSearchTask(task, "failed to merge result [" + exc.getMessage() + "]") + ).start() + ) ); + } + // for synchronous CCS minimize_roundtrips=false, use the CCSSingleCoordinatorSearchProgressListener + // (AsyncSearchTask will not return SearchProgressListener.NOOP, since it uses its own progress listener + // which delegates to CCSSingleCoordinatorSearchProgressListener when minimizing roundtrips) + if (clusters.isCcsMinimizeRoundtrips() == false + && clusters.hasRemoteClusters() + && task.getProgressListener() == SearchProgressListener.NOOP) { + task.setProgressListener(new CCSSingleCoordinatorSearchProgressListener()); + } + final SearchPhaseResults queryResultConsumer = searchPhaseController.newSearchPhaseResults( + executor, + circuitBreaker, + task::isCancelled, + task.getProgressListener(), + searchRequest, + shardIterators.size(), + exc -> searchTransportService.cancelSearchTask(task, "failed to merge result [" + exc.getMessage() + "]") + ); + boolean success = false; + try { + final SearchPhase searchPhase; if (searchRequest.searchType() == DFS_QUERY_THEN_FETCH) { - return new SearchDfsQueryThenFetchAsyncAction( + searchPhase = new SearchDfsQueryThenFetchAsyncAction( logger, namedWriteableRegistry, searchTransportService, @@ -1359,7 +1361,7 @@ public SearchPhase newSearchPhase( ); } else { assert searchRequest.searchType() == QUERY_THEN_FETCH : searchRequest.searchType(); - return new SearchQueryThenFetchAsyncAction( + searchPhase = new SearchQueryThenFetchAsyncAction( logger, namedWriteableRegistry, searchTransportService, @@ -1377,6 +1379,12 @@ public SearchPhase newSearchPhase( clusters ); } + success = true; + return 
searchPhase; + } finally { + if (success == false) { + queryResultConsumer.close(); + } } } } diff --git a/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java b/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java index 0dbe66822d311..e88ebbdc07688 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java @@ -26,6 +26,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.service.MasterService; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.TimeValue; @@ -322,11 +323,25 @@ public void onTimeout(TimeValue timeout) { logger.debug(() -> format("timed out while retrying [%s] after failure (timeout [%s])", actionName, timeout), failure); listener.onFailure(new MasterNotDiscoveredException(failure)); } + + @Override + public String toString() { + return Strings.format( + "listener for [%s] retrying after cluster state version [%d]", + AsyncSingleAction.this, + currentStateVersion + ); + } }, clusterState -> isTaskCancelled() || statePredicate.test(clusterState)); } private boolean isTaskCancelled() { - return task instanceof CancellableTask && ((CancellableTask) task).isCancelled(); + return task instanceof CancellableTask cancellableTask && cancellableTask.isCancelled(); + } + + @Override + public String toString() { + return Strings.format("execution of [%s]", task); } } } diff --git a/server/src/main/java/org/elasticsearch/client/internal/ClusterAdminClient.java b/server/src/main/java/org/elasticsearch/client/internal/ClusterAdminClient.java index f2b9c5ef9631e..daae078ed9a68 100644 --- 
a/server/src/main/java/org/elasticsearch/client/internal/ClusterAdminClient.java +++ b/server/src/main/java/org/elasticsearch/client/internal/ClusterAdminClient.java @@ -21,6 +21,9 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequestBuilder; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction; +import org.elasticsearch.action.admin.cluster.node.capabilities.NodesCapabilitiesRequest; +import org.elasticsearch.action.admin.cluster.node.capabilities.NodesCapabilitiesResponse; +import org.elasticsearch.action.admin.cluster.node.capabilities.TransportNodesCapabilitiesAction; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequestBuilder; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; @@ -248,6 +251,14 @@ public NodesStatsRequestBuilder prepareNodesStats(String... 
nodesIds) { return new NodesStatsRequestBuilder(this).setNodesIds(nodesIds); } + public ActionFuture nodesCapabilities(final NodesCapabilitiesRequest request) { + return execute(TransportNodesCapabilitiesAction.TYPE, request); + } + + public void nodesCapabilities(final NodesCapabilitiesRequest request, final ActionListener listener) { + execute(TransportNodesCapabilitiesAction.TYPE, request, listener); + } + public void nodesUsage(final NodesUsageRequest request, final ActionListener listener) { execute(TransportNodesUsageAction.TYPE, request, listener); } diff --git a/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java b/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java index 1baa287830c75..532a33d07b25d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java +++ b/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java @@ -53,8 +53,6 @@ import java.util.Set; import java.util.stream.Stream; -import static org.elasticsearch.TransportVersions.SNAPSHOTS_IN_PROGRESS_TRACKING_REMOVING_NODES_ADDED; - /** * Meta data about snapshots that are currently executing */ @@ -93,7 +91,7 @@ public SnapshotsInProgress(StreamInput in) throws IOException { } private static Set readNodeIdsForRemoval(StreamInput in) throws IOException { - return in.getTransportVersion().onOrAfter(SNAPSHOTS_IN_PROGRESS_TRACKING_REMOVING_NODES_ADDED) + return in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0) ? 
in.readCollectionAsImmutableSet(StreamInput::readString) : Set.of(); } @@ -246,7 +244,7 @@ public void writeTo(StreamOutput out) throws IOException { while (iterator.hasNext()) { iterator.next().writeTo(out); } - if (out.getTransportVersion().onOrAfter(SNAPSHOTS_IN_PROGRESS_TRACKING_REMOVING_NODES_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeStringCollection(nodesIdsForRemoval); } else { assert nodesIdsForRemoval.isEmpty() : nodesIdsForRemoval; @@ -433,7 +431,7 @@ private static boolean assertShardStateConsistent( * running shard snapshots. */ public SnapshotsInProgress withUpdatedNodeIdsForRemoval(ClusterState clusterState) { - assert clusterState.getMinTransportVersion().onOrAfter(TransportVersions.SNAPSHOTS_IN_PROGRESS_TRACKING_REMOVING_NODES_ADDED); + assert clusterState.getMinTransportVersion().onOrAfter(TransportVersions.V_8_13_0); final var updatedNodeIdsForRemoval = new HashSet<>(nodesIdsForRemoval); @@ -1709,7 +1707,7 @@ public void writeTo(StreamOutput out) throws IOException { } else { new SimpleDiffable.CompleteDiff<>(after).writeTo(out); } - if (out.getTransportVersion().onOrAfter(SNAPSHOTS_IN_PROGRESS_TRACKING_REMOVING_NODES_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeStringCollection(nodeIdsForRemoval); } else { assert nodeIdsForRemoval.isEmpty() : nodeIdsForRemoval; diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java b/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java index c2cd403836593..b46b79754be7a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java @@ -43,9 +43,16 @@ import static org.elasticsearch.cluster.coordination.ClusterBootstrapService.INITIAL_MASTER_NODES_SETTING; import static 
org.elasticsearch.monitor.StatusInfo.Status.UNHEALTHY; +/** + * Handles periodic debug logging of information regarding why the cluster has failed to form. + * Periodic logging begins once {@link #start()} is called, and ceases on {@link #stop()}. + */ public class ClusterFormationFailureHelper { private static final Logger logger = LogManager.getLogger(ClusterFormationFailureHelper.class); + /** + * This time period controls how often warning log messages will be written if this node fails to join or form a cluster. + */ public static final Setting DISCOVERY_CLUSTER_FORMATION_WARNING_TIMEOUT_SETTING = Setting.timeSetting( "discovery.cluster_formation_warning_timeout", TimeValue.timeValueMillis(10000), @@ -61,6 +68,16 @@ public class ClusterFormationFailureHelper { @Nullable // if no warning is scheduled private volatile WarningScheduler warningScheduler; + /** + * Works with the {@link JoinHelper} to log the latest node-join attempt failure and cluster state debug information. Must call + * {@link ClusterFormationState#start()} to begin. + * + * @param settings provides the period in which to log cluster formation errors. + * @param clusterFormationStateSupplier information about the current believed cluster state (See {@link ClusterFormationState}) + * @param threadPool the thread pool on which to run debug logging + * @param logLastFailedJoinAttempt invokes an instance of the JoinHelper to log the last encountered join failure + * (See {@link JoinHelper#logLastFailedJoinAttempt()}) + */ public ClusterFormationFailureHelper( Settings settings, Supplier clusterFormationStateSupplier, @@ -78,6 +95,10 @@ public boolean isRunning() { return warningScheduler != null; } + /** + * Schedules a warning debug message to be logged in 'clusterFormationWarningTimeout' time, and periodically thereafter, until + * {@link ClusterFormationState#stop()} has been called. 
+ */ public void start() { assert warningScheduler == null; warningScheduler = new WarningScheduler(); @@ -129,7 +150,7 @@ public String toString() { } /** - * If this node believes that cluster formation has failed, this record provides information that can be used to determine why that is. + * This record provides node state information that can be used to determine why cluster formation has failed. */ public record ClusterFormationState( List initialMasterNodesSetting, @@ -220,7 +241,7 @@ public ClusterFormationState(StreamInput in) throws IOException { new VotingConfiguration(in), in.readCollectionAsImmutableList(TransportAddress::new), in.readCollectionAsImmutableList(DiscoveryNode::new), - in.getTransportVersion().onOrAfter(TransportVersions.PEERFINDER_REPORTS_PEERS_MASTERS) + in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0) ? in.readCollectionAsImmutableSet(DiscoveryNode::new) : Set.of(), in.readLong(), @@ -402,7 +423,7 @@ public void writeTo(StreamOutput out) throws IOException { lastCommittedConfiguration.writeTo(out); out.writeCollection(resolvedAddresses); out.writeCollection(foundPeers); - if (out.getTransportVersion().onOrAfter(TransportVersions.PEERFINDER_REPORTS_PEERS_MASTERS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeCollection(mastersOfPeers); } out.writeLong(currentTerm); diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java index 156ba88a7d2b1..daff05f0fb19b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java @@ -1781,7 +1781,7 @@ public void run() { final var nodeEligibility = localNodeMayWinElection(lastAcceptedState, electionStrategy); if (nodeEligibility.mayWin() == false) { assert nodeEligibility.reason().isEmpty() == false; - logger.trace( + 
logger.info( "skip prevoting as local node may not win election ({}): {}", nodeEligibility.reason(), lastAcceptedState.coordinationMetadata() diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java index b960bb02ceb7f..059400ad81cfb 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java @@ -194,13 +194,23 @@ private void unregisterAndReleaseConnection(DiscoveryNode destination, Releasabl Releasables.close(connectionReference); } - // package-private for testing + /** + * Saves information about a join failure. The failure information may be logged later via either {@link FailedJoinAttempt#logNow} + * or {@link FailedJoinAttempt#lastFailedJoinAttempt}. + * + * Package-private for testing. + */ static class FailedJoinAttempt { private final DiscoveryNode destination; private final JoinRequest joinRequest; private final ElasticsearchException exception; private final long timestamp; + /** + * @param destination the master node targeted by the join request. + * @param joinRequest the join request that was sent to the perceived master node. + * @param exception the error response received in reply to the join request attempt. + */ FailedJoinAttempt(DiscoveryNode destination, JoinRequest joinRequest, ElasticsearchException exception) { this.destination = destination; this.joinRequest = joinRequest; @@ -208,10 +218,18 @@ static class FailedJoinAttempt { this.timestamp = System.nanoTime(); } + /** + * Logs the failed join attempt exception. + * {@link FailedJoinAttempt#getLogLevel(ElasticsearchException)} determines at what log-level the log is written. 
+ */ void logNow() { logger.log(getLogLevel(exception), () -> format("failed to join %s with %s", destination, joinRequest), exception); } + /** + * Returns the appropriate log level based on the given exception. Every error is at least DEBUG, but unexpected errors are INFO. + * For example, NotMasterException and CircuitBreakingExceptions are DEBUG logs. + */ static Level getLogLevel(ElasticsearchException e) { Throwable cause = e.unwrapCause(); if (cause instanceof CoordinationStateRejectedException @@ -226,6 +244,10 @@ void logWarnWithTimestamp() { logger.warn( () -> format( "last failed join attempt was %s ago, failed to join %s with %s", + // 'timestamp' is when this error exception was received by the local node. If the time that has passed since the error + // was originally received is quite large, it could indicate that this is a stale error exception from some prior + // out-of-order request response (where a later sent request but earlier received response was successful); or + // alternatively an old error could indicate that this node did not retry the join request for a very long time. TimeValue.timeValueMillis(TimeValue.nsecToMSec(System.nanoTime() - timestamp)), destination, joinRequest @@ -235,6 +257,9 @@ void logWarnWithTimestamp() { } } + /** + * Logs a warning message if {@link #lastFailedJoinAttempt} has been set with a failure. 
+ */ void logLastFailedJoinAttempt() { FailedJoinAttempt attempt = lastFailedJoinAttempt.get(); if (attempt != null) { @@ -247,7 +272,7 @@ public void sendJoinRequest(DiscoveryNode destination, long term, Optional assert destination.isMasterNode() : "trying to join master-ineligible " + destination; final StatusInfo statusInfo = nodeHealthService.getHealth(); if (statusInfo.getStatus() == UNHEALTHY) { - logger.debug("dropping join request to [{}]: [{}]", destination, statusInfo.getInfo()); + logger.debug("dropping join request to [{}], unhealthy status: [{}]", destination, statusInfo.getInfo()); return; } final JoinRequest joinRequest = new JoinRequest( diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinStatus.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinStatus.java index 19113bc770000..6e0e7d8dda5a5 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinStatus.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinStatus.java @@ -8,6 +8,7 @@ package org.elasticsearch.cluster.coordination; +import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -19,7 +20,14 @@ public record JoinStatus(DiscoveryNode remoteNode, long term, String message, TimeValue age) implements Writeable { public JoinStatus(StreamInput in) throws IOException { - this(new DiscoveryNode(in), in.readLong(), in.readString(), new TimeValue(in.readLong(), TimeUnit.valueOf(in.readString()))); + this( + new DiscoveryNode(in), + in.readLong(), + in.readString(), + in.getTransportVersion().onOrAfter(TransportVersions.JOIN_STATUS_AGE_SERIALIZATION) + ? 
in.readTimeValue() + : new TimeValue(in.readLong(), TimeUnit.valueOf(in.readString())) + ); } @Override @@ -27,7 +35,11 @@ public void writeTo(StreamOutput out) throws IOException { remoteNode.writeTo(out); out.writeLong(term); out.writeString(message); - out.writeLong(age.duration()); - out.writeString(age.timeUnit().name()); + if (out.getTransportVersion().onOrAfter(TransportVersions.JOIN_STATUS_AGE_SERIALIZATION)) { + out.writeTimeValue(age); + } else { + out.writeLong(age.duration()); + out.writeString(age.timeUnit().name()); + } } } diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/NodeJoinExecutor.java b/server/src/main/java/org/elasticsearch/cluster/coordination/NodeJoinExecutor.java index 2c024063e2399..9223e02fc946c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/NodeJoinExecutor.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/NodeJoinExecutor.java @@ -26,6 +26,7 @@ import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.version.CompatibilityVersions; import org.elasticsearch.common.Priority; +import org.elasticsearch.common.Strings; import org.elasticsearch.features.FeatureService; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; @@ -123,7 +124,14 @@ public ClusterState execute(BatchExecutionContext batchExecutionContex newState = ClusterState.builder(initialState); } else { logger.trace("processing node joins, but we are not the master. current master: {}", currentNodes.getMasterNode()); - throw new NotMasterException("Node [" + currentNodes.getLocalNode() + "] not master for join request"); + throw new NotMasterException( + Strings.format( + "Node [%s] not master for join request. 
Current known master [%s], current term [%d]", + currentNodes.getLocalNode(), + currentNodes.getMasterNode(), + term + ) + ); } DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(newState.nodes()); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java index 16ad072f271ff..e4b47b8d26c6e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java @@ -978,7 +978,7 @@ public DataStream(StreamInput in) throws IOException { in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X) ? in.readOptionalWriteable(DataStreamLifecycle::new) : null, in.getTransportVersion().onOrAfter(DataStream.ADDED_FAILURE_STORE_TRANSPORT_VERSION) ? in.readBoolean() : false, in.getTransportVersion().onOrAfter(DataStream.ADDED_FAILURE_STORE_TRANSPORT_VERSION) ? readIndices(in) : List.of(), - in.getTransportVersion().onOrAfter(TransportVersions.LAZY_ROLLOVER_ADDED) ? in.readBoolean() : false, + in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0) ? in.readBoolean() : false, in.getTransportVersion().onOrAfter(DataStream.ADDED_AUTO_SHARDING_EVENT_VERSION) ? 
in.readOptionalWriteable(DataStreamAutoShardingEvent::new) : null @@ -1022,7 +1022,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(failureStoreEnabled); out.writeCollection(failureIndices); } - if (out.getTransportVersion().onOrAfter(TransportVersions.LAZY_ROLLOVER_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeBoolean(rolloverOnWrite); } if (out.getTransportVersion().onOrAfter(DataStream.ADDED_AUTO_SHARDING_EVENT_VERSION)) { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java index 1ce950cf71f58..403b4b85e664b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java @@ -204,7 +204,7 @@ public static DesiredNode readFrom(StreamInput in) throws IOException { final var memory = ByteSizeValue.readFrom(in); final var storage = ByteSizeValue.readFrom(in); final String version; - if (in.getTransportVersion().onOrAfter(TransportVersions.DESIRED_NODE_VERSION_OPTIONAL_STRING)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { version = in.readOptionalString(); } else { version = Version.readVersion(in).toString(); @@ -237,7 +237,7 @@ public void writeTo(StreamOutput out) throws IOException { } memory.writeTo(out); storage.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.DESIRED_NODE_VERSION_OPTIONAL_STRING)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeOptionalString(version); } else { Version parsedVersion = parseLegacyVersion(version); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ReservedStateMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ReservedStateMetadata.java index ec8200bf2d701..5df045df4ecd8 100644 --- 
a/server/src/main/java/org/elasticsearch/cluster/metadata/ReservedStateMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ReservedStateMetadata.java @@ -47,6 +47,8 @@ public record ReservedStateMetadata( ) implements SimpleDiffable, ToXContentFragment { public static final Long NO_VERSION = Long.MIN_VALUE; // use min long as sentinel for uninitialized version + public static final Long EMPTY_VERSION = -1L; // use -1 as sentinel for empty metadata + public static final Long RESTORED_VERSION = 0L; // use 0 as sentinel for metadata restored from snapshot private static final ParseField VERSION = new ParseField("version"); private static final ParseField HANDLERS = new ParseField("handlers"); diff --git a/server/src/main/java/org/elasticsearch/common/file/MasterNodeFileWatchingService.java b/server/src/main/java/org/elasticsearch/common/file/MasterNodeFileWatchingService.java index 6da0845a7c7ba..65bfa804cec2f 100644 --- a/server/src/main/java/org/elasticsearch/common/file/MasterNodeFileWatchingService.java +++ b/server/src/main/java/org/elasticsearch/common/file/MasterNodeFileWatchingService.java @@ -15,6 +15,7 @@ import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.gateway.GatewayService; import java.io.IOException; import java.nio.file.Files; @@ -58,7 +59,8 @@ protected void doStop() { @Override public final void clusterChanged(ClusterChangedEvent event) { ClusterState clusterState = event.state(); - if (clusterState.nodes().isLocalNodeElectedMaster()) { + if (clusterState.nodes().isLocalNodeElectedMaster() + && clusterState.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK) == false) { synchronized (this) { if (watching() || active == false) { refreshExistingFileStateIfNeeded(clusterState); diff --git a/server/src/main/java/org/elasticsearch/common/settings/Setting.java 
b/server/src/main/java/org/elasticsearch/common/settings/Setting.java index 4fb02fdaac7b4..a385950e10922 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/server/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -113,7 +113,7 @@ public enum Property { DeprecatedWarning, /** - * Node scope + * Cluster-level or configuration file-level setting. Not an index setting. */ NodeScope, diff --git a/server/src/main/java/org/elasticsearch/health/node/HealthInfo.java b/server/src/main/java/org/elasticsearch/health/node/HealthInfo.java index 697c5eff939f9..941e034a83dea 100644 --- a/server/src/main/java/org/elasticsearch/health/node/HealthInfo.java +++ b/server/src/main/java/org/elasticsearch/health/node/HealthInfo.java @@ -39,9 +39,7 @@ public HealthInfo(StreamInput input) throws IOException { input.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0) ? input.readOptionalWriteable(DataStreamLifecycleHealthInfo::new) : null, - input.getTransportVersion().onOrAfter(TransportVersions.HEALTH_INFO_ENRICHED_WITH_REPOS) - ? input.readMap(RepositoriesHealthInfo::new) - : Map.of() + input.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0) ? 
input.readMap(RepositoriesHealthInfo::new) : Map.of() ); } @@ -51,7 +49,7 @@ public void writeTo(StreamOutput output) throws IOException { if (output.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { output.writeOptionalWriteable(dslHealthInfo); } - if (output.getTransportVersion().onOrAfter(TransportVersions.HEALTH_INFO_ENRICHED_WITH_REPOS)) { + if (output.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { output.writeMap(repositoriesInfoByNode, StreamOutput::writeWriteable); } } diff --git a/server/src/main/java/org/elasticsearch/health/node/UpdateHealthInfoCacheAction.java b/server/src/main/java/org/elasticsearch/health/node/UpdateHealthInfoCacheAction.java index f780de078527a..b0dc5958c7ed0 100644 --- a/server/src/main/java/org/elasticsearch/health/node/UpdateHealthInfoCacheAction.java +++ b/server/src/main/java/org/elasticsearch/health/node/UpdateHealthInfoCacheAction.java @@ -71,7 +71,7 @@ public Request(StreamInput in) throws IOException { if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { this.diskHealthInfo = in.readOptionalWriteable(DiskHealthInfo::new); this.dslHealthInfo = in.readOptionalWriteable(DataStreamLifecycleHealthInfo::new); - this.repositoriesHealthInfo = in.getTransportVersion().onOrAfter(TransportVersions.HEALTH_INFO_ENRICHED_WITH_REPOS) + this.repositoriesHealthInfo = in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0) ? 
in.readOptionalWriteable(RepositoriesHealthInfo::new) : null; } else { @@ -113,13 +113,13 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { out.writeOptionalWriteable(diskHealthInfo); out.writeOptionalWriteable(dslHealthInfo); - if (out.getTransportVersion().onOrAfter(TransportVersions.HEALTH_INFO_ENRICHED_WITH_REPOS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeOptionalWriteable(repositoriesHealthInfo); } } else { // BWC for pre-8.12 the disk health info was mandatory. Evolving this request has proven tricky however we've made use of - // waiting for all nodes to be on the {@link TransportVersions.HEALTH_INFO_ENRICHED_WITH_DSL_STATUS} transport version - // before sending any requests to update the health info that'd break the pre HEALTH_INFO_ENRICHED_WITH_DSL_STATUS + // waiting for all nodes to be on the {@link TransportVersions.V_8_12_0} transport version + // before sending any requests to update the health info that'd break the pre-8.12 // transport invariant of always having a disk health information in the request diskHealthInfo.writeTo(out); } diff --git a/server/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java b/server/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java index 8b087f5a302db..047c38138fda0 100644 --- a/server/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java +++ b/server/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java @@ -95,6 +95,18 @@ public void beforeIndexShardClosed(ShardId shardId, @Nullable IndexShard indexSh } } + @Override + public void afterIndexShardClosing(ShardId shardId, @Nullable IndexShard indexShard, Settings indexSettings) { + for (IndexEventListener listener : listeners) { + try { + listener.afterIndexShardClosing(shardId, indexShard, indexSettings); + } catch (Exception e) { + logger.warn(() -> "[" + shardId.getId() 
+ "] failed to invoke after shard closing callback", e); + throw e; + } + } + } + @Override public void afterIndexShardClosed(ShardId shardId, @Nullable IndexShard indexShard, Settings indexSettings) { for (IndexEventListener listener : listeners) { diff --git a/server/src/main/java/org/elasticsearch/index/IndexService.java b/server/src/main/java/org/elasticsearch/index/IndexService.java index eb0672f7ad122..88db674c3ec2f 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexService.java +++ b/server/src/main/java/org/elasticsearch/index/IndexService.java @@ -649,6 +649,7 @@ public void onFailure(Exception e) { onResponse(null); // otherwise ignore the exception } }, l -> indexShard.close(reason, flushEngine, closeExecutor, l)); + listener.afterIndexShardClosing(sId, indexShard, indexSettings); } } } finally { diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 78f07c8a137b9..f076ee0be5540 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -105,6 +105,7 @@ private static IndexVersion def(int id, Version luceneVersion) { public static final IndexVersion TIME_SERIES_ROUTING_HASH_IN_ID = def(8_504_00_0, Version.LUCENE_9_10_0); public static final IndexVersion DEFAULT_DENSE_VECTOR_TO_INT8_HNSW = def(8_505_00_0, Version.LUCENE_9_10_0); public static final IndexVersion DOC_VALUES_FOR_IGNORED_META_FIELD = def(8_505_00_1, Version.LUCENE_9_10_0); + public static final IndexVersion SOURCE_MAPPER_LOSSY_PARAMS_CHECK = def(8_506_00_0, Version.LUCENE_9_10_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index 233faf462400b..1b6d6dd1141f4 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -134,10 +134,11 @@ public static class Builder extends MetadataFieldMapper.Builder { private final boolean supportsNonDefaultParameterValues; - public Builder(IndexMode indexMode, final Settings settings) { + public Builder(IndexMode indexMode, final Settings settings, boolean supportsCheckForNonDefaultParams) { super(Defaults.NAME); this.indexMode = indexMode; - this.supportsNonDefaultParameterValues = settings.getAsBoolean(LOSSY_PARAMETERS_ALLOWED_SETTING_NAME, true); + this.supportsNonDefaultParameterValues = supportsCheckForNonDefaultParams == false + || settings.getAsBoolean(LOSSY_PARAMETERS_ALLOWED_SETTING_NAME, true); } public Builder setSynthetic() { @@ -212,7 +213,11 @@ public SourceFieldMapper build() { c -> c.getIndexSettings().getMode() == IndexMode.TIME_SERIES ? c.getIndexSettings().getIndexVersionCreated().onOrAfter(IndexVersions.V_8_7_0) ? 
TSDB_DEFAULT : TSDB_LEGACY_DEFAULT : DEFAULT, - c -> new Builder(c.getIndexSettings().getMode(), c.getSettings()) + c -> new Builder( + c.getIndexSettings().getMode(), + c.getSettings(), + c.indexVersionCreated().onOrAfter(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK) + ) ); static final class SourceFieldType extends MappedFieldType { @@ -347,7 +352,7 @@ protected String contentType() { @Override public FieldMapper.Builder getMergeBuilder() { - return new Builder(indexMode, Settings.EMPTY).init(this); + return new Builder(indexMode, Settings.EMPTY, false).init(this); } /** diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexEventListener.java b/server/src/main/java/org/elasticsearch/index/shard/IndexEventListener.java index 5bea31d2d204d..b27a275889751 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexEventListener.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexEventListener.java @@ -55,6 +55,13 @@ default void afterIndexShardStarted(IndexShard indexShard) {} */ default void beforeIndexShardClosed(ShardId shardId, @Nullable IndexShard indexShard, Settings indexSettings) {} + /** + * Called after the index shard has been marked closed. It could still be waiting for the async close of the engine. + * The ordering between this and the subsequent state notifications (closed, deleted, store closed) is + * not guaranteed. + */ + default void afterIndexShardClosing(ShardId shardId, @Nullable IndexShard indexShard, Settings indexSettings) {} + /** * Called after the index shard has been closed. 
* diff --git a/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommand.java b/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommand.java index b74abe3cc0790..ace891f9aead6 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommand.java +++ b/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommand.java @@ -50,9 +50,7 @@ import org.elasticsearch.index.translog.TruncateTranslogAction; import java.io.IOException; -import java.io.OutputStream; import java.io.PrintStream; -import java.io.PrintWriter; import java.nio.file.Files; import java.nio.file.Path; import java.util.Arrays; @@ -60,6 +58,7 @@ import java.util.Map; import java.util.Objects; +import static java.nio.charset.StandardCharsets.UTF_8; import static org.elasticsearch.common.lucene.Lucene.indexWriterConfigWithNoMerging; public class RemoveCorruptedShardDataCommand extends ElasticsearchNodeCommand { @@ -249,13 +248,7 @@ public void processDataPaths(Terminal terminal, Path[] dataPaths, OptionSet opti throw new ElasticsearchException("translog directory [" + translogPath + "], must exist and be a directory"); } - final PrintWriter writer = terminal.getWriter(); - final PrintStream printStream = new PrintStream(new OutputStream() { - @Override - public void write(int b) { - writer.write(b); - } - }, false, "UTF-8"); + final PrintStream printStream = new PrintStream(terminal.asLineOutputStream(UTF_8), false, UTF_8); final boolean verbose = terminal.isPrintable(Terminal.Verbosity.VERBOSE); final Directory indexDirectory = getDirectory(indexPath); diff --git a/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java b/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java index fa2475921aa93..c682c44b47bab 100644 --- a/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java +++ 
b/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java @@ -1067,27 +1067,30 @@ public void accept(final IndexShard.ShardFailure shardFailure) { final ShardRouting shardRouting = shardFailure.routing(); threadPool.generic().execute(() -> { synchronized (IndicesClusterStateService.this) { - try { - CloseUtils.executeDirectly( - l -> failAndRemoveShard( - shardRouting, - true, - "shard failure, reason [" + shardFailure.reason() + "]", - shardFailure.cause(), - clusterService.state(), - EsExecutors.DIRECT_EXECUTOR_SERVICE /* NB holding mutex while closing shard, ES-8334 TODO revisit this? */, - l - ) - ); - } catch (Exception e) { - // should not be possible - final var wrappedException = new IllegalStateException( - "unexpected failure in FailedShardHandler on " + shardRouting, - e - ); - logger.error(wrappedException.getMessage(), e); - assert false : e; - } + ActionListener.run(ActionListener.assertOnce(new ActionListener() { + @Override + public void onResponse(Void unused) {} + + @Override + public void onFailure(Exception e) { + final var wrappedException = new IllegalStateException( + "unexpected failure in FailedShardHandler on " + shardRouting, + e + ); + logger.error(wrappedException.getMessage(), e); + assert false : e; + } + }), + l -> failAndRemoveShard( + shardRouting, + true, + "shard failure, reason [" + shardFailure.reason() + "]", + shardFailure.cause(), + clusterService.state(), + shardCloseExecutor, + l + ) + ); } }); } diff --git a/server/src/main/java/org/elasticsearch/inference/FilteredXContent.java b/server/src/main/java/org/elasticsearch/inference/FilteredXContent.java new file mode 100644 index 0000000000000..1c63aea61b7c8 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/inference/FilteredXContent.java @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.inference; + +import org.elasticsearch.xcontent.ToXContentObject; + +/** + * Provides a contract for retrieving exposed fields. + */ +public interface FilteredXContent { + /** + * Returns a {@link ToXContentObject} that only writes the exposed fields. Any hidden fields are not written. + */ + ToXContentObject getFilteredXContentObject(); +} diff --git a/server/src/main/java/org/elasticsearch/inference/ServiceSettings.java b/server/src/main/java/org/elasticsearch/inference/ServiceSettings.java index 6c1a01acb0dab..b143f74c848c1 100644 --- a/server/src/main/java/org/elasticsearch/inference/ServiceSettings.java +++ b/server/src/main/java/org/elasticsearch/inference/ServiceSettings.java @@ -12,12 +12,7 @@ import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.xcontent.ToXContentObject; -public interface ServiceSettings extends ToXContentObject, VersionedNamedWriteable { - - /** - * Returns a {@link ToXContentObject} that only writes the exposed fields. Any hidden fields are not written. - */ - ToXContentObject getFilteredXContentObject(); +public interface ServiceSettings extends ToXContentObject, VersionedNamedWriteable, FilteredXContent { /** * Similarity used in the service. Will be null if not applicable. 
diff --git a/server/src/main/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighter.java b/server/src/main/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighter.java index 5c1381f730013..c29e248b1a689 100644 --- a/server/src/main/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighter.java +++ b/server/src/main/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighter.java @@ -293,7 +293,8 @@ public QueryVisitor getSubVisitor(BooleanClause.Occur occur, Query parent) { if (parent instanceof ESToParentBlockJoinQuery) { hasUnknownLeaf[0] = true; } - return super.getSubVisitor(occur, parent); + // we want to visit all queries, including those within the must_not clauses. + return this; } }); return hasUnknownLeaf[0]; diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index b49f43234b0cc..d90fce8bbc742 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -259,7 +259,7 @@ static NodeConstruction prepareConstruction( ThreadPool threadPool = constructor.createThreadPool(settings, telemetryProvider.getMeterRegistry()); SettingsModule settingsModule = constructor.validateSettings(initialEnvironment.settings(), settings, threadPool); - SearchModule searchModule = constructor.createSearchModule(settingsModule.getSettings(), threadPool); + SearchModule searchModule = constructor.createSearchModule(settingsModule.getSettings(), threadPool, telemetryProvider); constructor.createClientAndRegistries(settingsModule.getSettings(), threadPool, searchModule); DocumentParsingProvider documentParsingProvider = constructor.getDocumentParsingProvider(); @@ -527,9 +527,9 @@ private SettingsModule validateSettings(Settings envSettings, Settings settings, return settingsModule; } - private SearchModule createSearchModule(Settings 
settings, ThreadPool threadPool) { + private SearchModule createSearchModule(Settings settings, ThreadPool threadPool, TelemetryProvider telemetryProvider) { IndexSearcher.setMaxClauseCount(SearchUtils.calculateMaxClauseValue(threadPool)); - return new SearchModule(settings, pluginsService.filterPlugins(SearchPlugin.class).toList()); + return new SearchModule(settings, pluginsService.filterPlugins(SearchPlugin.class).toList(), telemetryProvider); } /** diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java index 8719c8cbf8730..f765ee591fb40 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java @@ -102,7 +102,7 @@ protected boolean shouldRefreshFileState(ClusterState clusterState) { // We check if the version was reset to 0, and force an update if a file exists. This can happen in situations // like snapshot restores. 
ReservedStateMetadata fileSettingsMetadata = clusterState.metadata().reservedStateMetadata().get(NAMESPACE); - return fileSettingsMetadata != null && fileSettingsMetadata.version() == 0L; + return fileSettingsMetadata != null && fileSettingsMetadata.version().equals(ReservedStateMetadata.RESTORED_VERSION); } /** diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java index d2aea19417787..a281db9f02383 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java @@ -42,6 +42,7 @@ import java.util.stream.Collectors; import static org.elasticsearch.ExceptionsHelper.stackTrace; +import static org.elasticsearch.cluster.metadata.ReservedStateMetadata.EMPTY_VERSION; import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.reservedstate.service.ReservedStateErrorTask.checkErrorVersion; import static org.elasticsearch.reservedstate.service.ReservedStateErrorTask.isNewError; @@ -112,7 +113,7 @@ ReservedStateChunk parse(String namespace, XContentParser parser) { try { return stateChunkParser.apply(parser, null); } catch (Exception e) { - ErrorState errorState = new ErrorState(namespace, -1L, e, ReservedStateErrorMetadata.ErrorKind.PARSING); + ErrorState errorState = new ErrorState(namespace, EMPTY_VERSION, e, ReservedStateErrorMetadata.ErrorKind.PARSING); updateErrorState(errorState); logger.debug("error processing state change request for [{}] with the following errors [{}]", namespace, errorState); @@ -134,7 +135,7 @@ public void process(String namespace, XContentParser parser, Consumer try { stateChunk = parse(namespace, parser); } catch (Exception e) { - ErrorState errorState = new ErrorState(namespace, -1L, e, ReservedStateErrorMetadata.ErrorKind.PARSING); 
+ ErrorState errorState = new ErrorState(namespace, EMPTY_VERSION, e, ReservedStateErrorMetadata.ErrorKind.PARSING); updateErrorState(errorState); logger.debug("error processing state change request for [{}] with the following errors [{}]", namespace, errorState); @@ -148,7 +149,7 @@ public void process(String namespace, XContentParser parser, Consumer } public void initEmpty(String namespace, ActionListener listener) { - var missingVersion = new ReservedStateVersion(-1L, Version.CURRENT); + var missingVersion = new ReservedStateVersion(EMPTY_VERSION, Version.CURRENT); var emptyState = new ReservedStateChunk(Map.of(), missingVersion); updateTaskQueue.submitTask( "empty initial cluster state [" + namespace + "]", diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateErrorTask.java b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateErrorTask.java index 0be4a7972d05c..1a45a357fe621 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateErrorTask.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateErrorTask.java @@ -18,6 +18,9 @@ import org.elasticsearch.cluster.metadata.ReservedStateErrorMetadata; import org.elasticsearch.cluster.metadata.ReservedStateMetadata; +import static org.elasticsearch.cluster.metadata.ReservedStateMetadata.EMPTY_VERSION; +import static org.elasticsearch.cluster.metadata.ReservedStateMetadata.NO_VERSION; +import static org.elasticsearch.cluster.metadata.ReservedStateMetadata.RESTORED_VERSION; import static org.elasticsearch.core.Strings.format; /** @@ -50,8 +53,10 @@ ActionListener listener() { static boolean isNewError(ReservedStateMetadata existingMetadata, Long newStateVersion) { return (existingMetadata == null || existingMetadata.errorMetadata() == null - || newStateVersion <= 0 // version will be -1 when we can't even parse the file, it might be 0 on snapshot restore - || existingMetadata.errorMetadata().version() < 
newStateVersion); + || existingMetadata.errorMetadata().version() < newStateVersion + || newStateVersion.equals(RESTORED_VERSION) + || newStateVersion.equals(EMPTY_VERSION) + || newStateVersion.equals(NO_VERSION)); } static boolean checkErrorVersion(ClusterState currentState, ErrorState errorState) { diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java index 2ee9aa0d86a0e..1ac42a91736c3 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java @@ -19,6 +19,7 @@ import org.elasticsearch.cluster.metadata.ReservedStateErrorMetadata; import org.elasticsearch.cluster.metadata.ReservedStateHandlerMetadata; import org.elasticsearch.cluster.metadata.ReservedStateMetadata; +import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.reservedstate.NonStateTransformResult; import org.elasticsearch.reservedstate.ReservedClusterStateHandler; import org.elasticsearch.reservedstate.TransformState; @@ -80,6 +81,13 @@ ActionListener listener() { } protected ClusterState execute(final ClusterState currentState) { + if (currentState.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) { + // If cluster state has become blocked, this task was submitted while the node was master but is now not master. + // The new master will re-read file settings, so whatever update was to be written here will be handled + // by the new master. 
+ return currentState; + } + ReservedStateMetadata existingMetadata = currentState.metadata().reservedStateMetadata().get(namespace); Map reservedState = stateChunk.state(); ReservedStateVersion reservedStateVersion = stateChunk.metadata(); @@ -169,12 +177,11 @@ static boolean checkMetadataVersion( return false; } - // Version -1 is special, it means "empty" - if (reservedStateVersion.version() == -1L) { + if (reservedStateVersion.version().equals(ReservedStateMetadata.EMPTY_VERSION)) { return true; } - // Version 0 is special, snapshot restores will reset to 0. + // require a regular positive version, reject any special version if (reservedStateVersion.version() <= 0L) { logger.warn( () -> format( diff --git a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java index d075983464f76..b142e4d567c04 100644 --- a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java @@ -12,6 +12,7 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Releasable; @@ -75,8 +76,20 @@ public final long getUsageCount() { @Override public abstract List routes(); + private static final Set ALWAYS_SUPPORTED = Set.of("format", "filter_path", "pretty", "human"); + @Override public final void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { + // check if the query has any parameters that are not in the supported set (if declared) + Set supported = supportedQueryParameters(); + if (supported != null) { + var allSupported = Sets.union(ALWAYS_SUPPORTED, supported); + if 
(allSupported.containsAll(request.params().keySet()) == false) { + Set unsupported = Sets.difference(request.params().keySet(), allSupported); + throw new IllegalArgumentException(unrecognized(request, unsupported, allSupported, "parameter")); + } + } + // prepare the request for execution; has the side effect of touching the request parameters try (var action = prepareRequest(request, client)) { diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index 8ce9b08eba205..16813f1141e12 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -365,6 +365,32 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th } } + public boolean checkSupported( + RestRequest.Method method, + String path, + Set parameters, + Set capabilities, + RestApiVersion restApiVersion + ) { + Iterator allHandlers = getAllHandlers(null, path); + while (allHandlers.hasNext()) { + RestHandler handler; + MethodHandlers handlers = allHandlers.next(); + if (handlers == null) { + handler = null; + } else { + handler = handlers.getHandler(method, restApiVersion); + } + + if (handler != null) { + var supportedParams = handler.supportedQueryParameters(); + return (supportedParams == null || supportedParams.containsAll(parameters)) + && handler.supportedCapabilities().containsAll(capabilities); + } + } + return false; + } + @Override public Map getStats() { final Iterator methodHandlersIterator = handlers.allNodeValues(); diff --git a/server/src/main/java/org/elasticsearch/rest/RestFeatures.java b/server/src/main/java/org/elasticsearch/rest/RestFeatures.java index 73b788d63b2ab..111204fbe7fb8 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestFeatures.java +++ b/server/src/main/java/org/elasticsearch/rest/RestFeatures.java @@ -12,10 +12,17 @@ import 
org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.rest.action.admin.cluster.RestClusterGetSettingsAction; +import org.elasticsearch.rest.action.admin.cluster.RestNodesCapabilitiesAction; import java.util.Map; +import java.util.Set; public class RestFeatures implements FeatureSpecification { + @Override + public Set getFeatures() { + return Set.of(RestNodesCapabilitiesAction.CAPABILITIES_ACTION); + } + @Override public Map getHistoricalFeatures() { return Map.of(RestClusterGetSettingsAction.SUPPORTS_GET_SETTINGS_ACTION, Version.V_8_3_0); diff --git a/server/src/main/java/org/elasticsearch/rest/RestHandler.java b/server/src/main/java/org/elasticsearch/rest/RestHandler.java index c66fd72279670..4ab89618643f5 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/RestHandler.java @@ -18,6 +18,7 @@ import java.util.Collections; import java.util.List; import java.util.Objects; +import java.util.Set; /** * Handler for REST requests @@ -85,6 +86,22 @@ default List routes() { return Collections.emptyList(); } + /** + * The set of query parameters accepted by this rest handler, + * {@code null} if query parameters should not be checked nor validated. + * TODO - make this not nullable when all handlers have been updated + */ + default @Nullable Set supportedQueryParameters() { + return null; + } + + /** + * The set of capabilities this rest handler supports. + */ + default Set supportedCapabilities() { + return Set.of(); + } + /** * Controls whether requests handled by this class are allowed to to access system indices by default. * @return {@code true} if requests handled by this class should be allowed to access system indices. 
diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesCapabilitiesAction.java new file mode 100644 index 0000000000000..fae7903d02b82 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesCapabilitiesAction.java @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.rest.action.admin.cluster; + +import org.elasticsearch.action.admin.cluster.node.capabilities.NodesCapabilitiesRequest; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestActions.NodesResponseRestListener; + +import java.io.IOException; +import java.net.URLDecoder; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Set; + +@ServerlessScope(Scope.INTERNAL) +public class RestNodesCapabilitiesAction extends BaseRestHandler { + + public static final NodeFeature CAPABILITIES_ACTION = new NodeFeature("rest.capabilities_action"); + + @Override + public List routes() { + return List.of(new Route(RestRequest.Method.GET, "/_capabilities")); + } + + @Override + public Set supportedQueryParameters() { + return Set.of("timeout", "method", "path", "parameters", "capabilities"); + } + + @Override + public String getName() { + return 
"nodes_capabilities_action"; + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + NodesCapabilitiesRequest r = new NodesCapabilitiesRequest().timeout(request.paramAsTime("timeout", null)) + .method(RestRequest.Method.valueOf(request.param("method", "GET"))) + .path(URLDecoder.decode(request.param("path"), StandardCharsets.UTF_8)) + .parameters(request.paramAsStringArray("parameters", Strings.EMPTY_ARRAY)) + .capabilities(request.paramAsStringArray("capabilities", Strings.EMPTY_ARRAY)) + .restApiVersion(request.getRestApiVersion()); + + return channel -> client.admin().cluster().nodesCapabilities(r, new NodesResponseRestListener<>(channel)); + } + + @Override + public boolean canTripCircuitBreaker() { + return false; + } +} diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java index 97b747c650c1b..8d5fa0a7ac155 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java @@ -254,6 +254,7 @@ import org.elasticsearch.search.vectors.KnnScoreDocQueryBuilder; import org.elasticsearch.search.vectors.KnnVectorQueryBuilder; import org.elasticsearch.search.vectors.QueryVectorBuilder; +import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; @@ -289,6 +290,11 @@ public class SearchModule { Setting.Property.NodeScope ); + /** + * Metric name for aggregation usage statistics + */ + private final TelemetryProvider telemetryProvider; + private final Map highlighters; private final List fetchSubPhases = new ArrayList<>(); @@ -306,7 +312,19 @@ public class SearchModule { * @param plugins List of included {@link SearchPlugin} objects. 
*/ public SearchModule(Settings settings, List plugins) { + this(settings, plugins, TelemetryProvider.NOOP); + } + + /** + * Constructs a new SearchModule object + * + * @param settings Current settings + * @param plugins List of included {@link SearchPlugin} objects. + * @param telemetryProvider + */ + public SearchModule(Settings settings, List plugins, TelemetryProvider telemetryProvider) { this.settings = settings; + this.telemetryProvider = telemetryProvider; registerSuggesters(plugins); highlighters = setupHighlighters(settings, plugins); registerScoreFunctions(plugins); @@ -352,7 +370,7 @@ public Map getHighlighters() { } private ValuesSourceRegistry registerAggregations(List plugins) { - ValuesSourceRegistry.Builder builder = new ValuesSourceRegistry.Builder(); + ValuesSourceRegistry.Builder builder = new ValuesSourceRegistry.Builder(telemetryProvider.getMeterRegistry()); registerAggregation( new AggregationSpec(AvgAggregationBuilder.NAME, AvgAggregationBuilder::new, AvgAggregationBuilder.PARSER).addResultReader( diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java index f29850a306b75..e75b2d2002b0f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java @@ -236,11 +236,7 @@ public int hashCode() { } boolean versionSupportsDownsamplingTimezone(TransportVersion version) { - return version.onOrAfter(TransportVersions.DATE_HISTOGRAM_SUPPORT_DOWNSAMPLED_TZ) - || version.between( - TransportVersions.DATE_HISTOGRAM_SUPPORT_DOWNSAMPLED_TZ_8_12_PATCH, - TransportVersions.NODE_STATS_REQUEST_SIMPLIFIED - ); + return version.onOrAfter(TransportVersions.V_8_13_0) || version.isPatchFrom(TransportVersions.V_8_12_1); } /** diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java index f8e7f3cf3a69c..91bb4c3f0cd74 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.util.ObjectObjectPagedHashMap; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.AggregationErrors; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorReducer; @@ -29,6 +30,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Optional; /** * Result of the significant terms aggregation. @@ -208,10 +210,27 @@ protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceCont reduceContext.bigArrays() ); + private InternalAggregation referenceAgg = null; + @Override public void accept(InternalAggregation aggregation) { + /* + canLeadReduction here is essentially checking if this shard returned data. Unmapped shards (that didn't + specify a missing value) will be false. Since they didn't return data, we can safely skip them, and + doing so prevents us from accidentally taking one as the reference agg for type checking, which would cause + shards that actually returned data to fail. 
+ */ + if (aggregation.canLeadReduction() == false) { + return; + } @SuppressWarnings("unchecked") final InternalSignificantTerms terms = (InternalSignificantTerms) aggregation; + if (referenceAgg == null) { + referenceAgg = terms; + } else if (referenceAgg.getClass().equals(terms.getClass()) == false) { + // We got here because shards had different mappings for the same field (presumably different indices) + throw AggregationErrors.reduceTypeMismatch(referenceAgg.getName(), Optional.empty()); + } // Compute the overall result set size and the corpus size using the // top-level Aggregations from each shard globalSubsetSize += terms.getSubsetSize(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationUsageService.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationUsageService.java index 853aa152db036..28ef6f934d287 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationUsageService.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationUsageService.java @@ -9,12 +9,18 @@ package org.elasticsearch.search.aggregations.support; import org.elasticsearch.node.ReportingService; +import org.elasticsearch.telemetry.metric.LongCounter; +import org.elasticsearch.telemetry.metric.MeterRegistry; import java.util.HashMap; import java.util.Map; import java.util.concurrent.atomic.LongAdder; public class AggregationUsageService implements ReportingService { + private static final String ES_SEARCH_QUERY_AGGREGATIONS_TOTAL_COUNT = "es.search.query.aggregations.total"; + private final String AGGREGATION_NAME_KEY = "aggregation_name"; + private final String VALUES_SOURCE_KEY = "values_source"; + private final LongCounter aggregationsUsageCounter; private final Map> aggs; private final AggregationInfo info; @@ -22,9 +28,16 @@ public class AggregationUsageService implements ReportingService> aggs; + private final MeterRegistry meterRegistry; 
public Builder() { + this(MeterRegistry.NOOP); + } + + public Builder(MeterRegistry meterRegistry) { aggs = new HashMap<>(); + assert meterRegistry != null; + this.meterRegistry = meterRegistry; } public void registerAggregationUsage(String aggregationName) { @@ -45,9 +58,16 @@ public AggregationUsageService build() { } } + // Attribute names for the metric + private AggregationUsageService(Builder builder) { this.aggs = builder.aggs; info = new AggregationInfo(aggs); + this.aggregationsUsageCounter = builder.meterRegistry.registerLongCounter( + ES_SEARCH_QUERY_AGGREGATIONS_TOTAL_COUNT, + "Aggregations usage", + "count" + ); } public void incAggregationUsage(String aggregationName, String valuesSourceType) { @@ -61,6 +81,8 @@ public void incAggregationUsage(String aggregationName, String valuesSourceType) assert adder != null : "Unknown subtype [" + aggregationName + "][" + valuesSourceType + "]"; } assert valuesSourceMap != null : "Unknown aggregation [" + aggregationName + "][" + valuesSourceType + "]"; + // tests will have a no-op implementation here + aggregationsUsageCounter.incrementBy(1, Map.of(AGGREGATION_NAME_KEY, aggregationName, VALUES_SOURCE_KEY, valuesSourceType)); } public Map getUsageStats() { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceRegistry.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceRegistry.java index 44e66d98f0258..fcfcad96d9fbf 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceRegistry.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceRegistry.java @@ -10,6 +10,7 @@ import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.aggregations.AggregationExecutionException; +import org.elasticsearch.telemetry.metric.MeterRegistry; import java.util.AbstractMap; import java.util.ArrayList; @@ -58,7 
+59,11 @@ public static class Builder { private final Map, List>> aggregatorRegistry = new HashMap<>(); public Builder() { - this.usageServiceBuilder = new AggregationUsageService.Builder(); + this(MeterRegistry.NOOP); + } + + public Builder(MeterRegistry meterRegistry) { + this.usageServiceBuilder = new AggregationUsageService.Builder(meterRegistry); } /** diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java index 3b8e4e69d9318..68e46186e4505 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java @@ -28,7 +28,7 @@ public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext) { } assert fetchSourceContext.fetchSource(); SourceFilter sourceFilter = fetchSourceContext.filter(); - + final boolean filterExcludesAll = sourceFilter.excludesAll(); return new FetchSubPhaseProcessor() { private int fastPath; @@ -67,8 +67,13 @@ private void hitExecute(FetchSourceContext fetchSourceContext, HitContext hitCon return; } - // Otherwise, filter the source and add it to the hit. - source = source.filter(sourceFilter); + if (filterExcludesAll) { + // we can just add an empty map + source = Source.empty(source.sourceContentType()); + } else { + // Otherwise, filter the source and add it to the hit. 
+ source = source.filter(sourceFilter); + } if (nestedHit) { source = extractNested(source, hitContext.hit().getNestedIdentity()); } diff --git a/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java index 232c12e944a96..35f96ee2dc102 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java @@ -351,6 +351,7 @@ public Query rewrittenQuery() { * Adds a releasable that will be freed when this context is closed. */ public void addReleasable(Releasable releasable) { // TODO most Releasables are managed by their callers. We probably don't need this. + assert closed.get() == false; releasables.add(releasable); } diff --git a/server/src/main/java/org/elasticsearch/search/lookup/SourceFilter.java b/server/src/main/java/org/elasticsearch/search/lookup/SourceFilter.java index 3bf32159c1676..ceffb32c08b48 100644 --- a/server/src/main/java/org/elasticsearch/search/lookup/SourceFilter.java +++ b/server/src/main/java/org/elasticsearch/search/lookup/SourceFilter.java @@ -109,4 +109,8 @@ private Function buildBytesFilter() { } }; } + + public boolean excludesAll() { + return Arrays.asList(excludes).contains("*"); + } } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilder.java b/server/src/main/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilder.java index 60b0d259961da..1f05b215699b1 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilder.java @@ -128,6 +128,6 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.NESTED_KNN_MORE_INNER_HITS; + return TransportVersions.V_8_13_0; } } diff --git 
a/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilder.java b/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilder.java index 6de6338b604ef..65f8c60297ad8 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilder.java @@ -63,7 +63,7 @@ public KnnScoreDocQueryBuilder(ScoreDoc[] scoreDocs, String fieldName, VectorDat public KnnScoreDocQueryBuilder(StreamInput in) throws IOException { super(in); this.scoreDocs = in.readArray(Lucene::readScoreDoc, ScoreDoc[]::new); - if (in.getTransportVersion().onOrAfter(TransportVersions.NESTED_KNN_MORE_INNER_HITS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { this.fieldName = in.readOptionalString(); if (in.readBoolean()) { if (in.getTransportVersion().onOrAfter(TransportVersions.KNN_EXPLICIT_BYTE_QUERY_VECTOR_PARSING)) { @@ -100,7 +100,7 @@ VectorData queryVector() { @Override protected void doWriteTo(StreamOutput out) throws IOException { out.writeArray(Lucene::writeScoreDoc, scoreDocs); - if (out.getTransportVersion().onOrAfter(TransportVersions.NESTED_KNN_MORE_INNER_HITS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeOptionalString(fieldName); if (queryVector != null) { out.writeBoolean(true); diff --git a/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java b/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java index aa5daa532cf42..0c8dfc9a98330 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java @@ -167,7 +167,7 @@ private KnnVectorQueryBuilder( public KnnVectorQueryBuilder(StreamInput in) throws IOException { super(in); this.fieldName = in.readString(); - if 
(in.getTransportVersion().onOrAfter(TransportVersions.KNN_QUERY_NUMCANDS_AS_OPTIONAL_PARAM)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { this.numCands = in.readOptionalVInt(); } else { this.numCands = in.readVInt(); @@ -245,7 +245,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { throw new IllegalStateException("missing a rewriteAndFetch?"); } out.writeString(fieldName); - if (out.getTransportVersion().onOrAfter(TransportVersions.KNN_QUERY_NUMCANDS_AS_OPTIONAL_PARAM)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeOptionalVInt(numCands); } else { if (numCands == null) { diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index d505a6ded4809..dd8ddcffd5fe3 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -4126,7 +4126,7 @@ static ClusterState executeBatch( } private static boolean supportsNodeRemovalTracking(ClusterState clusterState) { - return clusterState.getMinTransportVersion().onOrAfter(TransportVersions.SNAPSHOTS_IN_PROGRESS_TRACKING_REMOVING_NODES_ADDED); + return clusterState.getMinTransportVersion().onOrAfter(TransportVersions.V_8_13_0); } private final MasterServiceTaskQueue updateNodeIdsToRemoveQueue; diff --git a/server/src/main/resources/org/elasticsearch/TransportVersions.csv b/server/src/main/resources/org/elasticsearch/TransportVersions.csv index e6f0da6a45452..526f327b91c19 100644 --- a/server/src/main/resources/org/elasticsearch/TransportVersions.csv +++ b/server/src/main/resources/org/elasticsearch/TransportVersions.csv @@ -119,3 +119,4 @@ 8.13.1,8595000 8.13.2,8595000 8.13.3,8595000 +8.13.4,8595001 diff --git a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv 
b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv index bc6523c98761c..39f2a701726af 100644 --- a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv +++ b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv @@ -119,3 +119,4 @@ 8.13.1,8503000 8.13.2,8503000 8.13.3,8503000 +8.13.4,8503000 diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveClusterActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveClusterActionTests.java index 2a64fbad97575..d76bfc03e1d7f 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveClusterActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveClusterActionTests.java @@ -67,8 +67,8 @@ public void testCCSCompatibilityCheck() { @Override public void writeTo(StreamOutput out) throws IOException { throw new UnsupportedOperationException( - "ResolveClusterAction requires at least Transport Version " - + TransportVersions.RESOLVE_CLUSTER_ENDPOINT_ADDED.toReleaseVersion() + "ResolveClusterAction requires at least version " + + TransportVersions.V_8_13_0.toReleaseVersion() + " but was " + out.getTransportVersion().toReleaseVersion() ); @@ -99,7 +99,7 @@ public void writeTo(StreamOutput out) throws IOException { assertThat(ex.getMessage(), containsString("not compatible with version")); assertThat(ex.getMessage(), containsString("and the 'search.check_ccs_compatibility' setting is enabled.")); - assertThat(ex.getCause().getMessage(), containsString("ResolveClusterAction requires at least Transport Version")); + assertThat(ex.getCause().getMessage(), containsString("ResolveClusterAction requires at least version")); } finally { assertTrue(ESTestCase.terminate(threadPool)); } diff --git a/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java 
b/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java index b873bec2bd427..94e0ce1ccaf17 100644 --- a/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java @@ -7,6 +7,7 @@ */ package org.elasticsearch.action.support.master; +import org.apache.logging.log4j.Level; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionListener; @@ -36,6 +37,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodeRole; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterApplierService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; @@ -58,6 +60,8 @@ import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.MockLogAppender; +import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.CapturingTransport; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; @@ -475,6 +479,7 @@ public void testMasterNotAvailable() throws ExecutionException, InterruptedExcep assertFalse(request.hasReferences()); } + @TestLogging(reason = "testing TRACE logging", value = "org.elasticsearch.cluster.service:TRACE") public void testMasterBecomesAvailable() throws ExecutionException, InterruptedException { Request request = new Request(); if (randomBoolean()) { @@ -482,11 +487,24 @@ public void testMasterBecomesAvailable() throws ExecutionException, InterruptedE } setState(clusterService, ClusterStateCreationUtils.state(localNode, null, allNodes)); 
PlainActionFuture listener = new PlainActionFuture<>(); - ActionTestUtils.execute(new Action("internal:testAction", transportService, clusterService, threadPool), null, request, listener); + final var task = new Task(randomNonNegativeLong(), "test", "internal:testAction", "", TaskId.EMPTY_TASK_ID, Map.of()); + ActionTestUtils.execute(new Action("internal:testAction", transportService, clusterService, threadPool), task, request, listener); assertFalse(listener.isDone()); request.decRef(); assertTrue(request.hasReferences()); - setState(clusterService, ClusterStateCreationUtils.state(localNode, localNode, allNodes)); + + MockLogAppender.assertThatLogger( + () -> setState(clusterService, ClusterStateCreationUtils.state(localNode, localNode, allNodes)), + ClusterApplierService.class, + new MockLogAppender.SeenEventExpectation( + "listener log", + ClusterApplierService.class.getCanonicalName(), + Level.TRACE, + "calling [ClusterStateObserver[ObservingContext[ContextPreservingListener[listener for [execution of [" + + task + + "]] retrying after cluster state version [*]]]]] with change to version [*]" + ) + ); assertTrue(listener.isDone()); assertFalse(request.hasReferences()); listener.get(); diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/JoinValidationServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/JoinValidationServiceTests.java index 6df9260b2bccf..79203899b665d 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/JoinValidationServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/JoinValidationServiceTests.java @@ -60,6 +60,7 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; +import static org.elasticsearch.action.support.ActionTestUtils.assertNoSuccessListener; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; @@ -295,17 +296,9 @@ protected void onSendRequest(long 
requestId, String action, TransportRequest req assertSame(node, joiningNode); assertEquals(JoinValidationService.JOIN_VALIDATE_ACTION_NAME, action); - final var listener = new ActionListener() { - @Override - public void onResponse(TransportResponse transportResponse) { - fail("should not succeed"); - } - - @Override - public void onFailure(Exception e) { - handleError(requestId, new RemoteTransportException(node.getName(), node.getAddress(), action, e)); - } - }; + final ActionListener listener = assertNoSuccessListener( + e -> handleError(requestId, new RemoteTransportException(node.getName(), node.getAddress(), action, e)) + ); try (var ignored = NamedWriteableRegistryTests.ignoringUnknownNamedWriteables(); var out = new BytesStreamOutput()) { request.writeTo(out); diff --git a/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java index 50030143ec354..617e1cb09c353 100644 --- a/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java @@ -80,6 +80,7 @@ import java.util.stream.Collectors; import static java.util.Collections.emptySet; +import static org.elasticsearch.action.support.ActionTestUtils.assertNoSuccessListener; import static org.elasticsearch.cluster.service.MasterService.MAX_TASK_DESCRIPTION_CHARS; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.contains; @@ -1041,30 +1042,22 @@ public void onFailure(Exception e) { threadContext.putHeader(testContextHeaderName, testContextHeaderValue); final var expectFailure = randomBoolean(); final var taskComplete = new AtomicBoolean(); - final var task = new Task(expectFailure, testResponseHeaderValue, new ActionListener<>() { - @Override - public void onResponse(ClusterState clusterState) { - throw new AssertionError("should not succeed"); + final var task = new 
Task(expectFailure, testResponseHeaderValue, assertNoSuccessListener(e -> { + assertEquals(testContextHeaderValue, threadContext.getHeader(testContextHeaderName)); + assertEquals(List.of(testResponseHeaderValue), threadContext.getResponseHeaders().get(testResponseHeaderName)); + assertThat(e, instanceOf(FailedToCommitClusterStateException.class)); + assertThat(e.getMessage(), equalTo(publicationFailedExceptionMessage)); + if (expectFailure) { + assertThat(e.getSuppressed().length, greaterThan(0)); + var suppressed = e.getSuppressed()[0]; + assertThat(suppressed, instanceOf(ElasticsearchException.class)); + assertThat(suppressed.getMessage(), equalTo(taskFailedExceptionMessage)); } - - @Override - public void onFailure(Exception e) { - assertEquals(testContextHeaderValue, threadContext.getHeader(testContextHeaderName)); - assertEquals(List.of(testResponseHeaderValue), threadContext.getResponseHeaders().get(testResponseHeaderName)); - assertThat(e, instanceOf(FailedToCommitClusterStateException.class)); - assertThat(e.getMessage(), equalTo(publicationFailedExceptionMessage)); - if (expectFailure) { - assertThat(e.getSuppressed().length, greaterThan(0)); - var suppressed = e.getSuppressed()[0]; - assertThat(suppressed, instanceOf(ElasticsearchException.class)); - assertThat(suppressed.getMessage(), equalTo(taskFailedExceptionMessage)); - } - assertNotNull(publishedState.get()); - assertNotSame(stateBeforeFailure, publishedState.get()); - assertTrue(taskComplete.compareAndSet(false, true)); - publishFailureCountdown.countDown(); - } - }); + assertNotNull(publishedState.get()); + assertNotSame(stateBeforeFailure, publishedState.get()); + assertTrue(taskComplete.compareAndSet(false, true)); + publishFailureCountdown.countDown(); + })); queue.submitTask("test", task, null); } diff --git a/server/src/test/java/org/elasticsearch/common/file/MasterNodeFileWatchingServiceTests.java b/server/src/test/java/org/elasticsearch/common/file/MasterNodeFileWatchingServiceTests.java 
new file mode 100644 index 0000000000000..f92097f53bb81 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/common/file/MasterNodeFileWatchingServiceTests.java @@ -0,0 +1,119 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.common.file; + +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlocks; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.cluster.node.DiscoveryNodeUtils; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.gateway.GatewayService; +import org.elasticsearch.node.NodeRoleSettings; +import org.elasticsearch.test.ESTestCase; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.nio.file.Path; +import java.util.concurrent.ExecutionException; + +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class MasterNodeFileWatchingServiceTests extends ESTestCase { + + static final DiscoveryNode localNode = DiscoveryNodeUtils.create("local-node"); + MasterNodeFileWatchingService testService; + Path watchedFile; + Runnable fileChangedCallback; + + @Before + public void setupTestService() throws IOException { + watchedFile = createTempFile(); + ClusterService clusterService = mock(ClusterService.class); + Settings settings = 
Settings.builder() + .put(NodeRoleSettings.NODE_ROLES_SETTING.getKey(), DiscoveryNodeRole.MASTER_ROLE.roleName()) + .build(); + when(clusterService.getSettings()).thenReturn(settings); + fileChangedCallback = () -> {}; + testService = new MasterNodeFileWatchingService(clusterService, watchedFile) { + + @Override + protected void processFileChanges() throws InterruptedException, ExecutionException, IOException { + fileChangedCallback.run(); + } + + @Override + protected void processInitialFileMissing() throws InterruptedException, ExecutionException, IOException { + // file always exists, but we don't care about the missing case for master node behavior + } + }; + testService.start(); + } + + @After + public void stopTestService() { + testService.stop(); + } + + public void testBecomingMasterNodeStartsWatcher() { + ClusterState notRecoveredClusterState = ClusterState.builder(ClusterName.DEFAULT) + .nodes(DiscoveryNodes.builder().add(localNode).localNodeId(localNode.getId()).masterNodeId(localNode.getId())) + .blocks(ClusterBlocks.builder().addGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) + .build(); + testService.clusterChanged(new ClusterChangedEvent("test", notRecoveredClusterState, ClusterState.EMPTY_STATE)); + // just a master node isn't sufficient, cluster state also must be recovered + assertThat(testService.watching(), is(false)); + + ClusterState recoveredClusterState = ClusterState.builder(notRecoveredClusterState) + .blocks(ClusterBlocks.EMPTY_CLUSTER_BLOCK) + .build(); + testService.clusterChanged(new ClusterChangedEvent("test", recoveredClusterState, notRecoveredClusterState)); + // just a master node isn't sufficient, cluster state also must be recovered + assertThat(testService.watching(), is(true)); + } + + public void testChangingMasterStopsWatcher() { + ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT) + .nodes(DiscoveryNodes.builder().add(localNode).localNodeId(localNode.getId()).masterNodeId(localNode.getId())) + 
.build(); + testService.clusterChanged(new ClusterChangedEvent("test", clusterState, ClusterState.EMPTY_STATE)); + assertThat(testService.watching(), is(true)); + + final DiscoveryNode anotherNode = DiscoveryNodeUtils.create("another-node"); + ClusterState differentMasterClusterState = ClusterState.builder(ClusterName.DEFAULT) + .nodes( + DiscoveryNodes.builder().add(localNode).add(anotherNode).localNodeId(localNode.getId()).masterNodeId(anotherNode.getId()) + ) + .build(); + testService.clusterChanged(new ClusterChangedEvent("test", differentMasterClusterState, clusterState)); + assertThat(testService.watching(), is(false)); + } + + public void testBlockingClusterStateStopsWatcher() { + ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT) + .nodes(DiscoveryNodes.builder().add(localNode).localNodeId(localNode.getId()).masterNodeId(localNode.getId())) + .build(); + testService.clusterChanged(new ClusterChangedEvent("test", clusterState, ClusterState.EMPTY_STATE)); + assertThat(testService.watching(), is(true)); + + ClusterState blockedClusterState = ClusterState.builder(ClusterName.DEFAULT) + .nodes(DiscoveryNodes.builder().add(localNode).localNodeId(localNode.getId()).masterNodeId(localNode.getId())) + .blocks(ClusterBlocks.builder().addGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) + .build(); + testService.clusterChanged(new ClusterChangedEvent("test", blockedClusterState, clusterState)); + assertThat(testService.watching(), is(false)); + } +} diff --git a/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java b/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java index 4885bbc277cb4..8e62a9306a3d4 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java @@ -119,7 +119,6 @@ private void assertExpectedLogMessages(Consumer consumer, MockLogAppende Logger testLogger = 
LogManager.getLogger("org.elasticsearch.test"); MockLogAppender appender = new MockLogAppender(); try (var ignored = appender.capturing("org.elasticsearch.test")) { - appender.start(); Arrays.stream(expectations).forEach(appender::addExpectation); consumer.accept(testLogger); appender.assertAllExpectationsMatched(); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java index 229e2e6f72cc1..bab046d41b6e5 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java @@ -68,7 +68,7 @@ public void testCreateDynamicStringFieldAsKeywordForDimension() throws IOExcepti XContentParser parser = createParser(JsonXContent.jsonXContent, source); SourceToParse sourceToParse = new SourceToParse("test", new BytesArray(source), XContentType.JSON); - SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY).setSynthetic().build(); + SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false).setSynthetic().build(); RootObjectMapper root = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).add( new PassThroughObjectMapper.Builder("labels").setContainsDimensions().dynamic(ObjectMapper.Dynamic.TRUE) ).build(MapperBuilderContext.root(false, false)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java index a5264512d8086..802a18645eab6 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java @@ -13,6 +13,8 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; @@ -298,4 +300,44 @@ public void testSupportsNonDefaultParameterValues() throws IOException { ); assertThat(e.getMessage(), containsString("Parameters [enabled,includes,excludes] are not allowed in source")); } + + public void testBypassCheckForNonDefaultParameterValuesInEarlierVersions() throws IOException { + Settings settings = Settings.builder().put(SourceFieldMapper.LOSSY_PARAMETERS_ALLOWED_SETTING_NAME, false).build(); + { + var sourceFieldMapper = createMapperService( + IndexVersionUtils.getPreviousVersion(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK), + settings, + () -> true, + topMapping(b -> b.startObject("_source").field("enabled", false).endObject()) + ).documentMapper().sourceMapper(); + assertThat(sourceFieldMapper, notNullValue()); + } + { + var sourceFieldMapper = createMapperService( + IndexVersionUtils.getPreviousVersion(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK), + settings, + () -> true, + topMapping(b -> b.startObject("_source").array("includes", "foo").endObject()) + ).documentMapper().sourceMapper(); + assertThat(sourceFieldMapper, notNullValue()); + } + { + var sourceFieldMapper = createMapperService( + IndexVersionUtils.getPreviousVersion(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK), + settings, + () -> true, + topMapping(b -> b.startObject("_source").array("excludes", "foo").endObject()) + ).documentMapper().sourceMapper(); + assertThat(sourceFieldMapper, notNullValue()); + } + { + var sourceFieldMapper = createMapperService( + IndexVersionUtils.getPreviousVersion(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK), + settings, + () -> true, + topMapping(b -> b.startObject("_source").field("mode", "disabled").endObject()) + 
).documentMapper().sourceMapper(); + assertThat(sourceFieldMapper, notNullValue()); + } + } } diff --git a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java index 3085ff89603ce..e541c680ada1b 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java @@ -382,7 +382,7 @@ public void testSearchRequestRuntimeFieldsAndMultifieldDetection() { public void testSyntheticSourceSearchLookup() throws IOException { // Build a mapping using synthetic source - SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY).setSynthetic().build(); + SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false).setSynthetic().build(); RootObjectMapper root = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).add( new KeywordFieldMapper.Builder("cat", IndexVersion.current()).ignoreAbove(100) ).build(MapperBuilderContext.root(true, false)); diff --git a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index 72abe322c702b..efa46443e2da0 100644 --- a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -4000,8 +4000,8 @@ static boolean hasCircularReference(Exception cause) { return false; } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/108321") public void testDisabledFsync() throws IOException { + var translogDir = createTempDir(); var config = new TranslogConfig( shardId, translogDir, diff --git a/server/src/test/java/org/elasticsearch/readiness/ReadinessServiceTests.java b/server/src/test/java/org/elasticsearch/readiness/ReadinessServiceTests.java index 
1ad790ae31804..88661abf5f1fe 100644 --- a/server/src/test/java/org/elasticsearch/readiness/ReadinessServiceTests.java +++ b/server/src/test/java/org/elasticsearch/readiness/ReadinessServiceTests.java @@ -49,6 +49,7 @@ import java.util.Set; import static org.elasticsearch.cluster.metadata.ReservedStateErrorMetadata.ErrorKind.TRANSIENT; +import static org.elasticsearch.cluster.metadata.ReservedStateMetadata.EMPTY_VERSION; public class ReadinessServiceTests extends ESTestCase implements ReadinessClientProbe { private ClusterService clusterService; @@ -59,7 +60,7 @@ public class ReadinessServiceTests extends ESTestCase implements ReadinessClient private static Metadata emptyReservedStateMetadata; static { - var fileSettingsState = new ReservedStateMetadata.Builder(FileSettingsService.NAMESPACE).version(-1L); + var fileSettingsState = new ReservedStateMetadata.Builder(FileSettingsService.NAMESPACE).version(EMPTY_VERSION); emptyReservedStateMetadata = new Metadata.Builder().put(fileSettingsState.build()).build(); } diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java index 53ca55f8a5f81..aca5d2cbee2c9 100644 --- a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java @@ -8,8 +8,8 @@ package org.elasticsearch.reservedstate.service; -import org.apache.lucene.tests.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -55,7 +55,6 @@ import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; -@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106968") 
public class FileSettingsServiceTests extends ESTestCase { private Environment env; private ClusterService clusterService; @@ -234,54 +233,11 @@ public void testStopWorksInMiddleOfProcessing() throws Exception { return new ReservedStateChunk(Collections.emptyMap(), new ReservedStateVersion(1L, Version.CURRENT)); }).when(spiedController).parse(any(String.class), any()); - service.start(); - service.clusterChanged(new ClusterChangedEvent("test", clusterService.state(), ClusterState.EMPTY_STATE)); - assertTrue(service.watching()); - - Files.createDirectories(service.watchedFileDir()); - - // Make some fake settings file to cause the file settings service to process it - writeTestFile(service.watchedFile(), "{}"); - - // we need to wait a bit, on MacOS it may take up to 10 seconds for the Java watcher service to notice the file, - // on Linux is instantaneous. Windows is instantaneous too. - assertTrue(processFileLatch.await(30, TimeUnit.SECONDS)); - - // Stopping the service should interrupt the watcher thread, we should be able to stop - service.stop(); - assertFalse(service.watching()); - service.close(); - // let the deadlocked thread end, so we can cleanly exit the test - deadThreadLatch.countDown(); - } - - public void testStopWorksIfProcessingDidntReturnYet() throws Exception { - var spiedController = spy(controller); - var service = new FileSettingsService(clusterService, spiedController, env); - - CountDownLatch processFileLatch = new CountDownLatch(1); - CountDownLatch deadThreadLatch = new CountDownLatch(1); - - doAnswer((Answer) invocation -> { - // allow the other thread to continue, but hold on a bit to avoid - // completing the task immediately in the main watcher loop - try { - Thread.sleep(1_000); - } catch (InterruptedException e) { - // pass it on - Thread.currentThread().interrupt(); - } - processFileLatch.countDown(); - new Thread(() -> { - // Simulate a thread that never allows the completion to complete - try { - deadThreadLatch.await(); - } 
catch (InterruptedException e) { - throw new RuntimeException(e); - } - }).start(); - return new ReservedStateChunk(Collections.emptyMap(), new ReservedStateVersion(1L, Version.CURRENT)); - }).when(spiedController).parse(any(String.class), any()); + doAnswer((Answer) invocation -> { + var completionListener = invocation.getArgument(1, ActionListener.class); + completionListener.onResponse(null); + return null; + }).when(spiedController).initEmpty(any(String.class), any()); service.start(); service.clusterChanged(new ClusterChangedEvent("test", clusterService.state(), ClusterState.EMPTY_STATE)); @@ -296,7 +252,7 @@ public void testStopWorksIfProcessingDidntReturnYet() throws Exception { // on Linux is instantaneous. Windows is instantaneous too. assertTrue(processFileLatch.await(30, TimeUnit.SECONDS)); - // Stopping the service should interrupt the watcher thread, allowing the whole thing to exit + // Stopping the service should interrupt the watcher thread, we should be able to stop service.stop(); assertFalse(service.watching()); service.close(); diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTaskTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTaskTests.java new file mode 100644 index 0000000000000..d887d7edb19f2 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTaskTests.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.reservedstate.service; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlocks; +import org.elasticsearch.gateway.GatewayService; +import org.elasticsearch.test.ESTestCase; + +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.sameInstance; + +public class ReservedStateUpdateTaskTests extends ESTestCase { + public void testBlockedClusterState() { + var task = new ReservedStateUpdateTask("dummy", null, List.of(), Map.of(), List.of(), e -> {}, ActionListener.noop()); + ClusterState notRecoveredClusterState = ClusterState.builder(ClusterName.DEFAULT) + .blocks(ClusterBlocks.builder().addGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) + .build(); + assertThat(task.execute(notRecoveredClusterState), sameInstance(notRecoveredClusterState)); + } +} diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java index 3a4d67ae281f2..2b8bf0dad65fe 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java @@ -52,6 +52,27 @@ public void testBasicFiltering() throws IOException { assertEquals(Collections.singletonMap("field1", "value"), hitContext.hit().getSourceAsMap()); } + public void testExcludesAll() throws IOException { + XContentBuilder source = XContentFactory.jsonBuilder().startObject().field("field1", "value").field("field2", "value2").endObject(); + HitContext hitContext = hitExecute(source, false, null, null); + assertNull(hitContext.hit().getSourceAsMap()); + + hitContext = hitExecute(source, true, "field1", "*"); + assertEquals(Collections.emptyMap(), hitContext.hit().getSourceAsMap()); + + 
hitContext = hitExecute(source, true, null, "*"); + assertEquals(Collections.emptyMap(), hitContext.hit().getSourceAsMap()); + + hitContext = hitExecute(source, true, "*", "*"); + assertEquals(Collections.emptyMap(), hitContext.hit().getSourceAsMap()); + + hitContext = hitExecuteMultiple(source, true, new String[] { "field1", "field2" }, new String[] { "*", "field1" }); + assertEquals(Collections.emptyMap(), hitContext.hit().getSourceAsMap()); + + hitContext = hitExecuteMultiple(source, true, null, new String[] { "field2", "*", "field1" }); + assertEquals(Collections.emptyMap(), hitContext.hit().getSourceAsMap()); + } + public void testMultipleFiltering() throws IOException { XContentBuilder source = XContentFactory.jsonBuilder().startObject().field("field", "value").field("field2", "value2").endObject(); HitContext hitContext = hitExecuteMultiple(source, true, new String[] { "*.notexisting", "field" }, null); diff --git a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java index 38f8ad4766b7e..5c034a81fc9cd 100644 --- a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java +++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java @@ -269,7 +269,6 @@ public void testManyEval() throws IOException { assertMap(map, matchesMap().entry("columns", columns).entry("values", hasSize(10_000))); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/108104") public void testTooManyEval() throws IOException { initManyLongs(); assertCircuitBreaks(() -> manyEval(490)); diff --git a/test/framework/src/main/java/org/elasticsearch/action/support/ActionTestUtils.java 
b/test/framework/src/main/java/org/elasticsearch/action/support/ActionTestUtils.java index 187a8b6e4eab2..023305101f4c4 100644 --- a/test/framework/src/main/java/org/elasticsearch/action/support/ActionTestUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/action/support/ActionTestUtils.java @@ -22,6 +22,9 @@ import java.util.Map; import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; + +import static org.elasticsearch.test.ESTestCase.fail; public class ActionTestUtils { @@ -77,6 +80,27 @@ public static ActionListener assertNoFailureListener(CheckedConsumer ActionListener assertNoSuccessListener(Consumer consumer) { + return new ActionListener<>() { + @Override + public void onResponse(T result) { + fail(null, "unexpected success with result [%s] while expecting to handle failure with [%s]", result, consumer); + } + + @Override + public void onFailure(Exception e) { + try { + consumer.accept(e); + } catch (Exception e2) { + if (e2 != e) { + e2.addSuppressed(e); + } + fail(e2, "unexpected failure in onFailure handler for [%s]", consumer); + } + } + }; + } + public static ResponseListener wrapAsRestResponseListener(ActionListener listener) { return new ResponseListener() { @Override diff --git a/test/framework/src/main/java/org/elasticsearch/action/support/CancellableActionTestPlugin.java b/test/framework/src/main/java/org/elasticsearch/action/support/CancellableActionTestPlugin.java index 115ea63fb243e..dad0e3b613efb 100644 --- a/test/framework/src/main/java/org/elasticsearch/action/support/CancellableActionTestPlugin.java +++ b/test/framework/src/main/java/org/elasticsearch/action/support/CancellableActionTestPlugin.java @@ -26,6 +26,7 @@ import static org.elasticsearch.ExceptionsHelper.unwrapCause; import static org.elasticsearch.action.support.ActionTestUtils.assertNoFailureListener; +import static org.elasticsearch.action.support.ActionTestUtils.assertNoSuccessListener; import static 
org.elasticsearch.test.ESIntegTestCase.internalCluster; import static org.elasticsearch.test.ESTestCase.asInstanceOf; import static org.elasticsearch.test.ESTestCase.randomInt; @@ -37,7 +38,6 @@ import static org.junit.Assert.assertNull; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; /** * Utility plugin that captures the invocation of an action on a node after the task has been registered with the {@link TaskManager}, @@ -128,19 +128,11 @@ public void app if (capturingListener != null) { final var cancellableTask = asInstanceOf(CancellableTask.class, task); capturingListener.addListener(assertNoFailureListener(captured -> { - cancellableTask.addListener(() -> chain.proceed(task, action, request, new ActionListener<>() { - @Override - public void onResponse(Response response) { - fail("cancelled action should not succeed, but got " + response); - } - - @Override - public void onFailure(Exception e) { - assertThat(unwrapCause(e), instanceOf(TaskCancelledException.class)); - listener.onFailure(e); - captured.countDownLatch().countDown(); - } - })); + cancellableTask.addListener(() -> chain.proceed(task, action, request, assertNoSuccessListener(e -> { + assertThat(unwrapCause(e), instanceOf(TaskCancelledException.class)); + listener.onFailure(e); + captured.countDownLatch().countDown(); + }))); assertFalse(cancellableTask.isCancelled()); captured.doCancel().run(); })); diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractWireTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractWireTestCase.java index 8d4085623d156..eccbf602f2c71 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractWireTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractWireTestCase.java @@ -234,11 +234,24 @@ protected final void assertSerialization(T testInstance, TransportVersion versio * how equality is checked. 
*/ protected void assertEqualInstances(T expectedInstance, T newInstance) { - assertNotSame(newInstance, expectedInstance); + if (shouldBeSame(newInstance)) { + assertSame(newInstance, expectedInstance); + } else { + assertNotSame(newInstance, expectedInstance); + } assertThat(newInstance, equalTo(expectedInstance)); assertThat(newInstance.hashCode(), equalTo(expectedInstance.hashCode())); } + /** + * Should this copy be the same instance as what we're copying? Defaults to + * {@code false} but implementers might override if the serialization returns + * a reuse constant. + */ + protected boolean shouldBeSame(T newInstance) { + return false; + } + protected final T copyInstance(T instance) throws IOException { return copyInstance(instance, TransportVersion.current()); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index bea222a9d8341..83f7fdfe386c7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -64,6 +64,7 @@ import org.elasticsearch.common.logging.LogConfigurator; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateUtils; @@ -259,6 +260,7 @@ public static void resetPortCounter() { // TODO: consolidate logging initialization for tests so it all occurs in logconfigurator LogConfigurator.loadLog4jPlugins(); LogConfigurator.configureESLogging(); + MockLogAppender.init(); final List testAppenders = new ArrayList<>(3); for (String leakLoggerName : Arrays.asList("io.netty.util.ResourceLeakDetector", LeakTracker.class.getName())) { @@ -1058,6 +1060,11 @@ public static String randomAlphaOfLength(int codeUnits) { return 
RandomizedTest.randomAsciiOfLength(codeUnits); } + public static SecureString randomSecureStringOfLength(int codeUnits) { + var randomAlpha = randomAlphaOfLength(codeUnits); + return new SecureString(randomAlpha.toCharArray()); + } + public static String randomNullOrAlphaOfLength(int codeUnits) { return randomBoolean() ? null : randomAlphaOfLength(codeUnits); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java b/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java index 10a3a8a78e483..bc3723119afa9 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java +++ b/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java @@ -9,7 +9,6 @@ import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.LogEvent; import org.apache.logging.log4j.core.appender.AbstractAppender; import org.apache.logging.log4j.core.config.Property; @@ -19,9 +18,10 @@ import java.util.Arrays; import java.util.List; +import java.util.Map; import java.util.Objects; +import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CopyOnWriteArrayList; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.regex.Pattern; import static org.hamcrest.CoreMatchers.equalTo; @@ -31,12 +31,38 @@ /** * Test appender that can be used to verify that certain events were logged correctly */ -public class MockLogAppender extends AbstractAppender { +public class MockLogAppender { + private static final Map> mockAppenders = new ConcurrentHashMap<>(); + private static final RealMockAppender parent = new RealMockAppender(); private final List expectations; + private volatile boolean isAlive = true; + + private static class RealMockAppender extends AbstractAppender { + + RealMockAppender() { + super("mock", null, null, false, Property.EMPTY_ARRAY); + } + + @Override + public void append(LogEvent 
event) { + List appenders = mockAppenders.get(event.getLoggerName()); + if (appenders == null) { + // check if there is a root appender + appenders = mockAppenders.getOrDefault("", List.of()); + } + for (MockLogAppender appender : appenders) { + if (appender.isAlive == false) { + continue; + } + for (LoggingExpectation expectation : appender.expectations) { + expectation.match(event); + } + } + } + } public MockLogAppender() { - super("mock", null, null, false, Property.EMPTY_ARRAY); /* * We use a copy-on-write array list since log messages could be appended while we are setting up expectations. When that occurs, * we would run into a concurrent modification exception from the iteration over the expectations in #append, concurrent with a @@ -45,15 +71,16 @@ public MockLogAppender() { expectations = new CopyOnWriteArrayList<>(); } - public void addExpectation(LoggingExpectation expectation) { - expectations.add(new WrappedLoggingExpectation(expectation)); + /** + * Initialize the mock log appender with the log4j system. 
+ */ + public static void init() { + parent.start(); + Loggers.addAppender(LogManager.getLogger(""), parent); } - @Override - public void append(LogEvent event) { - for (LoggingExpectation expectation : expectations) { - expectation.match(event); - } + public void addExpectation(LoggingExpectation expectation) { + expectations.add(new WrappedLoggingExpectation(expectation)); } public void assertAllExpectationsMatched() { @@ -213,7 +240,7 @@ public void assertMatched() { */ private static class WrappedLoggingExpectation implements LoggingExpectation { - private final AtomicBoolean assertMatchedCalled = new AtomicBoolean(false); + private volatile boolean assertMatchedCalled = false; private final LoggingExpectation delegate; private WrappedLoggingExpectation(LoggingExpectation delegate) { @@ -230,7 +257,7 @@ public void assertMatched() { try { delegate.assertMatched(); } finally { - assertMatchedCalled.set(true); + assertMatchedCalled = true; } } @@ -243,34 +270,43 @@ public String toString() { /** * Adds the list of class loggers to this {@link MockLogAppender}. * - * Stops ({@link #stop()}) and runs some checks on the {@link MockLogAppender} once the returned object is released. + * Stops and runs some checks on the {@link MockLogAppender} once the returned object is released. */ public Releasable capturing(Class... classes) { - return appendToLoggers(Arrays.stream(classes).map(LogManager::getLogger).toList()); + return appendToLoggers(Arrays.stream(classes).map(Class::getCanonicalName).toList()); } /** * Same as above except takes string class names of each logger. */ public Releasable capturing(String... 
names) { - return appendToLoggers(Arrays.stream(names).map(LogManager::getLogger).toList()); + return appendToLoggers(Arrays.asList(names)); } - private Releasable appendToLoggers(List loggers) { - start(); - for (final var logger : loggers) { - Loggers.addAppender(logger, this); + private Releasable appendToLoggers(List loggers) { + for (String logger : loggers) { + mockAppenders.compute(logger, (k, v) -> { + if (v == null) { + v = new CopyOnWriteArrayList<>(); + } + v.add(this); + return v; + }); } return () -> { - for (final var logger : loggers) { - Loggers.removeAppender(logger, this); + isAlive = false; + for (String logger : loggers) { + mockAppenders.compute(logger, (k, v) -> { + assert v != null; + v.remove(this); + return v.isEmpty() ? null : v; + }); } - stop(); // check that all expectations have been evaluated before this is released for (WrappedLoggingExpectation expectation : expectations) { assertThat( "Method assertMatched() not called on LoggingExpectation instance before release: " + expectation, - expectation.assertMatchedCalled.get(), + expectation.assertMatchedCalled, is(true) ); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index fd3ba7d864f99..6dfd51c0bee5e 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -105,6 +105,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Base64; +import java.util.Collection; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; @@ -261,6 +262,43 @@ protected static Set readVersionsFromNodesInfo(RestClient adminClient) t .collect(Collectors.toUnmodifiableMap(entry -> entry.getKey().toString(), entry -> (Map) entry.getValue())); } + /** + * Does the cluster being tested support the set of capabilities + * for 
specified path and method. + */ + protected static Optional clusterHasCapability( + String method, + String path, + Collection parameters, + Collection capabilities + ) throws IOException { + return clusterHasCapability(adminClient, method, path, parameters, capabilities); + } + + /** + * Does the cluster on the other side of {@code client} support the set + * of capabilities for specified path and method. + */ + protected static Optional clusterHasCapability( + RestClient client, + String method, + String path, + Collection parameters, + Collection capabilities + ) throws IOException { + Request request = new Request("GET", "_capabilities"); + request.addParameter("method", method); + request.addParameter("path", path); + if (parameters.isEmpty() == false) { + request.addParameter("parameters", String.join(",", parameters)); + } + if (capabilities.isEmpty() == false) { + request.addParameter("capabilities", String.join(",", capabilities)); + } + Map response = entityAsMap(client.performRequest(request).getEntity()); + return Optional.ofNullable((Boolean) response.get("supported")); + } + protected static boolean clusterHasFeature(String featureId) { return testFeatureService.clusterHasFeature(featureId); } diff --git a/test/framework/src/test/java/org/elasticsearch/test/MockLogAppenderTests.java b/test/framework/src/test/java/org/elasticsearch/test/MockLogAppenderTests.java new file mode 100644 index 0000000000000..4973bb83311bc --- /dev/null +++ b/test/framework/src/test/java/org/elasticsearch/test/MockLogAppenderTests.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.test; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import java.util.concurrent.atomic.AtomicBoolean; + +public class MockLogAppenderTests extends ESTestCase { + + public void testConcurrentLogAndLifecycle() throws Exception { + Logger logger = LogManager.getLogger(MockLogAppenderTests.class); + final var keepGoing = new AtomicBoolean(true); + final var logThread = new Thread(() -> { + while (keepGoing.get()) { + logger.info("test"); + } + }); + logThread.start(); + + final var appender = new MockLogAppender(); + for (int i = 0; i < 1000; i++) { + try (var ignored = appender.capturing(MockLogAppenderTests.class)) { + Thread.yield(); + } + } + + keepGoing.set(false); + logThread.join(); + } +} diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterHandle.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterHandle.java index 718c9c1bb0042..5292d917df630 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterHandle.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterHandle.java @@ -65,6 +65,11 @@ public DefaultLocalClusterHandle(String name, List nodes) { this.nodes = nodes; } + @Override + public int getNumNodes() { + return nodes.size(); + } + @Override public void start() { if (started.getAndSet(true) == false) { diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalElasticsearchCluster.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalElasticsearchCluster.java index 77b73e7b6ce86..7b24709b18a90 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalElasticsearchCluster.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalElasticsearchCluster.java 
@@ -54,6 +54,11 @@ public void evaluate() throws Throwable { }; } + @Override + public int getNumNodes() { + return handle.getNumNodes(); + } + @Override public void start() { checkHandle(); diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterHandle.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterHandle.java index 7a95d682e9ddc..acb9ef77b9e41 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterHandle.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterHandle.java @@ -16,6 +16,12 @@ import java.io.InputStream; public interface LocalClusterHandle extends ClusterHandle { + + /** + * Returns the number of nodes that are part of this cluster. + */ + int getNumNodes(); + /** * Stops the node at a given index. * @param index of the node to stop diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java index 10bf2fb4b0a9f..4954065369ad9 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java @@ -16,7 +16,9 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.util.BytesRef; import org.elasticsearch.client.NodeSelector; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.util.Maps; import org.elasticsearch.test.rest.Stash; import org.elasticsearch.test.rest.TestFeatureService; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestApi; @@ -25,14 +27,19 @@ import org.elasticsearch.xcontent.XContentType; import java.io.IOException; +import java.io.UncheckedIOException; import 
java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.function.BiPredicate; +import static java.util.Collections.emptyList; +import static java.util.Collections.emptyMap; + /** * Execution context passed across the REST tests. * Holds the REST client used to communicate with elasticsearch. @@ -122,7 +129,15 @@ public ClientYamlTestResponse callApi( ) throws IOException { // makes a copy of the parameters before modifying them for this specific request Map requestParams = new HashMap<>(params); - requestParams.putIfAbsent("error_trace", "true"); // By default ask for error traces, this my be overridden by params + requestParams.compute("error_trace", (k, v) -> { + if (v == null) { + return "true"; // By default ask for error traces, this my be overridden by params + } else if (v.equals("false")) { + return null; + } else { + return v; + } + }); for (Map.Entry entry : requestParams.entrySet()) { if (stash.containsStashedValue(entry.getValue())) { entry.setValue(stash.getValue(entry.getValue()).toString()); @@ -264,4 +279,30 @@ public ClientYamlTestCandidate getClientYamlTestCandidate() { public boolean clusterHasFeature(String featureId) { return testFeatureService.clusterHasFeature(featureId); } + + public Optional clusterHasCapabilities(String method, String path, String parametersString, String capabilitiesString) { + Map params = Maps.newMapWithExpectedSize(5); + params.put("method", method); + params.put("path", path); + if (Strings.hasLength(parametersString)) { + params.put("parameters", parametersString); + } + if (Strings.hasLength(capabilitiesString)) { + params.put("capabilities", capabilitiesString); + } + params.put("error_trace", "false"); // disable error trace + try { + ClientYamlTestResponse resp = callApi("capabilities", params, emptyList(), emptyMap()); + // anything other than 200 should result in an 
exception, handled below + assert resp.getStatusCode() == 200 : "Unknown response code " + resp.getStatusCode(); + return Optional.ofNullable(resp.evaluate("supported")); + } catch (ClientYamlTestResponseException responseException) { + if (responseException.getRestTestResponse().getStatusCode() / 100 == 4) { + return Optional.empty(); // we don't know, the capabilities API is unsupported + } + throw new UncheckedIOException(responseException); + } catch (IOException ioException) { + throw new UncheckedIOException(ioException); + } + } } diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSection.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSection.java index 1ee447da1f111..c12de7e1155a7 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSection.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSection.java @@ -19,6 +19,7 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -27,6 +28,7 @@ import java.util.function.Predicate; import static java.util.Collections.emptyList; +import static java.util.stream.Collectors.joining; /** * Represents a section where prerequisites to run a specific test section or suite are specified. 
It is possible to specify preconditions @@ -43,16 +45,23 @@ record KnownIssue(String clusterFeature, String fixedBy) { private static final Set FIELD_NAMES = Set.of("cluster_feature", "fixed_by"); } + record CapabilitiesCheck(String method, String path, String parameters, String capabilities) { + private static final Set FIELD_NAMES = Set.of("method", "path", "parameters", "capabilities"); + } + static class PrerequisiteSectionBuilder { - String skipVersionRange = null; String skipReason = null; - String requiresReason = null; - List requiredYamlRunnerFeatures = new ArrayList<>(); + String skipVersionRange = null; List skipOperatingSystems = new ArrayList<>(); List skipKnownIssues = new ArrayList<>(); String skipAwaitsFix = null; Set skipClusterFeatures = new HashSet<>(); + List skipCapabilities = new ArrayList<>(); + + String requiresReason = null; + List requiredYamlRunnerFeatures = new ArrayList<>(); Set requiredClusterFeatures = new HashSet<>(); + List requiredCapabilities = new ArrayList<>(); enum XPackRequired { NOT_SPECIFIED, @@ -116,11 +125,21 @@ public PrerequisiteSectionBuilder skipKnownIssue(KnownIssue knownIssue) { return this; } + public PrerequisiteSectionBuilder skipIfCapabilities(CapabilitiesCheck capabilitiesCheck) { + skipCapabilities.add(capabilitiesCheck); + return this; + } + public PrerequisiteSectionBuilder requireClusterFeature(String featureName) { requiredClusterFeatures.add(featureName); return this; } + public PrerequisiteSectionBuilder requireCapabilities(CapabilitiesCheck capabilitiesCheck) { + requiredCapabilities.add(capabilitiesCheck); + return this; + } + public PrerequisiteSectionBuilder skipIfOs(String osName) { this.skipOperatingSystems.add(osName); return this; @@ -128,13 +147,15 @@ public PrerequisiteSectionBuilder skipIfOs(String osName) { void validate(XContentLocation contentLocation) { if ((Strings.isEmpty(skipVersionRange)) - && requiredYamlRunnerFeatures.isEmpty() && skipOperatingSystems.isEmpty() - && xpackRequired == 
XPackRequired.NOT_SPECIFIED - && requiredClusterFeatures.isEmpty() && skipClusterFeatures.isEmpty() + && skipCapabilities.isEmpty() && skipKnownIssues.isEmpty() - && Strings.isEmpty(skipAwaitsFix)) { + && Strings.isEmpty(skipAwaitsFix) + && xpackRequired == XPackRequired.NOT_SPECIFIED + && requiredYamlRunnerFeatures.isEmpty() + && requiredCapabilities.isEmpty() + && requiredClusterFeatures.isEmpty()) { // TODO separate the validation for requires / skip when dropping parsing of legacy fields, e.g. features in skip throw new ParsingException(contentLocation, "at least one predicate is mandatory within a skip or requires section"); } @@ -143,11 +164,12 @@ void validate(XContentLocation contentLocation) { && (Strings.isEmpty(skipVersionRange) && skipOperatingSystems.isEmpty() && skipClusterFeatures.isEmpty() + && skipCapabilities.isEmpty() && skipKnownIssues.isEmpty()) == false) { throw new ParsingException(contentLocation, "reason is mandatory within this skip section"); } - if (Strings.isEmpty(requiresReason) && (requiredClusterFeatures.isEmpty() == false)) { + if (Strings.isEmpty(requiresReason) && ((requiredClusterFeatures.isEmpty() && requiredCapabilities.isEmpty()) == false)) { throw new ParsingException(contentLocation, "reason is mandatory within this requires section"); } @@ -190,6 +212,13 @@ public PrerequisiteSection build() { if (xpackRequired == XPackRequired.YES) { requiresCriteriaList.add(Prerequisites.hasXPack()); } + if (requiredClusterFeatures.isEmpty() == false) { + requiresCriteriaList.add(Prerequisites.requireClusterFeatures(requiredClusterFeatures)); + } + if (requiredCapabilities.isEmpty() == false) { + requiresCriteriaList.add(Prerequisites.requireCapabilities(requiredCapabilities)); + } + if (xpackRequired == XPackRequired.NO) { skipCriteriaList.add(Prerequisites.hasXPack()); } @@ -199,12 +228,12 @@ public PrerequisiteSection build() { if (skipOperatingSystems.isEmpty() == false) { 
skipCriteriaList.add(Prerequisites.skipOnOsList(skipOperatingSystems)); } - if (requiredClusterFeatures.isEmpty() == false) { - requiresCriteriaList.add(Prerequisites.requireClusterFeatures(requiredClusterFeatures)); - } if (skipClusterFeatures.isEmpty() == false) { skipCriteriaList.add(Prerequisites.skipOnClusterFeatures(skipClusterFeatures)); } + if (skipCapabilities.isEmpty() == false) { + skipCriteriaList.add(Prerequisites.skipCapabilities(skipCapabilities)); + } if (skipKnownIssues.isEmpty() == false) { skipCriteriaList.add(Prerequisites.skipOnKnownIssue(skipKnownIssues)); } @@ -287,6 +316,7 @@ static void parseSkipSection(XContentParser parser, PrerequisiteSectionBuilder b case "os" -> parseStrings(parser, builder::skipIfOs); case "cluster_features" -> parseStrings(parser, builder::skipIfClusterFeature); case "known_issues" -> parseArray(parser, PrerequisiteSection::parseKnownIssue, builder::skipKnownIssue); + case "capabilities" -> parseArray(parser, PrerequisiteSection::parseCapabilities, builder::skipIfCapabilities); default -> false; }; } @@ -337,12 +367,47 @@ private static KnownIssue parseKnownIssue(XContentParser parser) throws IOExcept if (fields.keySet().equals(KnownIssue.FIELD_NAMES) == false) { throw new ParsingException( parser.getTokenLocation(), - Strings.format("Expected fields %s, but got %s", KnownIssue.FIELD_NAMES, fields.keySet()) + Strings.format("Expected all of %s, but got %s", KnownIssue.FIELD_NAMES, fields.keySet()) ); } return new KnownIssue(fields.get("cluster_feature"), fields.get("fixed_by")); } + private static CapabilitiesCheck parseCapabilities(XContentParser parser) throws IOException { + Map fields = parser.map(); + if (CapabilitiesCheck.FIELD_NAMES.containsAll(fields.keySet()) == false) { + throw new ParsingException( + parser.getTokenLocation(), + Strings.format("Expected some of %s, but got %s", CapabilitiesCheck.FIELD_NAMES, fields.keySet()) + ); + } + Object path = fields.get("path"); + if (path == null) { + throw new 
ParsingException(parser.getTokenLocation(), "path is required"); + } + + return new CapabilitiesCheck( + ensureString(ensureString(fields.getOrDefault("method", "GET"))), + ensureString(path), + stringArrayAsParamString("parameters", fields), + stringArrayAsParamString("capabilities", fields) + ); + } + + private static String ensureString(Object obj) { + if (obj instanceof String str) return str; + throw new IllegalArgumentException("Expected STRING, but got: " + obj); + } + + private static String stringArrayAsParamString(String name, Map fields) { + Object value = fields.get(name); + if (value == null) return null; + if (value instanceof Collection values) { + return values.stream().map(PrerequisiteSection::ensureString).collect(joining(",")); + } + return ensureString(value); + } + static void parseRequiresSection(XContentParser parser, PrerequisiteSectionBuilder builder) throws IOException { requireStartObject("requires", parser.nextToken()); @@ -361,6 +426,7 @@ static void parseRequiresSection(XContentParser parser, PrerequisiteSectionBuild valid = switch (parser.currentName()) { case "test_runner_features" -> parseStrings(parser, f -> parseFeatureField(f, builder)); case "cluster_features" -> parseStrings(parser, builder::requireClusterFeature); + case "capabilities" -> parseArray(parser, PrerequisiteSection::parseCapabilities, builder::requireCapabilities); default -> false; }; } diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/Prerequisites.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/Prerequisites.java index ca10101a4612c..86c035ebad62f 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/Prerequisites.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/Prerequisites.java @@ -10,8 +10,11 @@ import org.elasticsearch.test.rest.ESRestTestCase; import 
org.elasticsearch.test.rest.yaml.ClientYamlTestExecutionContext; +import org.elasticsearch.test.rest.yaml.section.PrerequisiteSection.CapabilitiesCheck; +import org.elasticsearch.test.rest.yaml.section.PrerequisiteSection.KnownIssue; import java.util.List; +import java.util.Optional; import java.util.Set; import java.util.function.Predicate; @@ -45,8 +48,23 @@ static Predicate skipOnClusterFeatures(Set clusterFeatures.stream().anyMatch(context::clusterHasFeature); } - static Predicate skipOnKnownIssue(List knownIssues) { + static Predicate skipOnKnownIssue(List knownIssues) { return context -> knownIssues.stream() .anyMatch(i -> context.clusterHasFeature(i.clusterFeature()) && context.clusterHasFeature(i.fixedBy()) == false); } + + static Predicate requireCapabilities(List checks) { + // requirement not fulfilled if unknown / capabilities API not supported + return context -> checks.stream().allMatch(check -> checkCapabilities(context, check).orElse(false)); + } + + static Predicate skipCapabilities(List checks) { + // skip if unknown / capabilities API not supported + return context -> checks.stream().anyMatch(check -> checkCapabilities(context, check).orElse(true)); + } + + private static Optional checkCapabilities(ClientYamlTestExecutionContext context, CapabilitiesCheck check) { + Optional b = context.clusterHasCapabilities(check.method(), check.path(), check.parameters(), check.capabilities()); + return b; + } } diff --git a/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSectionTests.java b/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSectionTests.java index a77b2cc5b40f1..0bb31ae2c574a 100644 --- a/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSectionTests.java +++ b/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSectionTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.core.Strings; 
import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.rest.yaml.ClientYamlTestExecutionContext; +import org.elasticsearch.test.rest.yaml.section.PrerequisiteSection.CapabilitiesCheck; import org.elasticsearch.test.rest.yaml.section.PrerequisiteSection.KnownIssue; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.yaml.YamlXContent; @@ -20,8 +21,11 @@ import java.io.IOException; import java.util.List; +import java.util.Optional; import java.util.Set; +import static java.lang.Boolean.FALSE; +import static java.lang.Boolean.TRUE; import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; import static org.hamcrest.Matchers.contains; @@ -36,6 +40,8 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.oneOf; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -357,8 +363,8 @@ public void testParseSkipSectionIncompleteKnownIssues() throws Exception { e.getMessage(), is( oneOf( - ("Expected fields [cluster_feature, fixed_by], but got [cluster_feature]"), - ("Expected fields [fixed_by, cluster_feature], but got [cluster_feature]") + ("Expected all of [cluster_feature, fixed_by], but got [cluster_feature]"), + ("Expected all of [fixed_by, cluster_feature], but got [cluster_feature]") ) ) ); @@ -498,6 +504,42 @@ public void testParseRequireAndSkipSectionsClusterFeatures() throws Exception { assertThat(parser.nextToken(), nullValue()); } + public void testParseRequireAndSkipSectionsCapabilities() throws Exception { + parser = createParser(YamlXContent.yamlXContent, """ + - requires: + capabilities: + - path: /a + - method: POST + path: /b + parameters: [param1, param2] + - method: PUT + path: /c + capabilities: [a, b, c] + reason: required to run test + - skip: + 
capabilities: + - path: /d + parameters: param1 + capabilities: a + reason: undesired if supported + """); + + var skipSectionBuilder = PrerequisiteSection.parseInternal(parser); + assertThat(skipSectionBuilder, notNullValue()); + assertThat( + skipSectionBuilder.requiredCapabilities, + contains( + new CapabilitiesCheck("GET", "/a", null, null), + new CapabilitiesCheck("POST", "/b", "param1,param2", null), + new CapabilitiesCheck("PUT", "/c", null, "a,b,c") + ) + ); + assertThat(skipSectionBuilder.skipCapabilities, contains(new CapabilitiesCheck("GET", "/d", "param1", "a"))); + + assertThat(parser.currentToken(), equalTo(XContentParser.Token.END_ARRAY)); + assertThat(parser.nextToken(), nullValue()); + } + public void testParseRequireAndSkipSectionMultipleClusterFeatures() throws Exception { parser = createParser(YamlXContent.yamlXContent, """ - requires: @@ -659,6 +701,43 @@ public void testSkipKnownIssue() { assertFalse(section.skipCriteriaMet(mockContext)); } + public void testEvaluateCapabilities() { + List skipCapabilities = List.of( + new CapabilitiesCheck("GET", "/s", null, "c1,c2"), + new CapabilitiesCheck("GET", "/s", "p1,p2", "c1") + ); + List requiredCapabilities = List.of( + new CapabilitiesCheck("GET", "/r", null, null), + new CapabilitiesCheck("GET", "/r", "p1", null) + ); + PrerequisiteSection section = new PrerequisiteSection( + List.of(Prerequisites.skipCapabilities(skipCapabilities)), + "skip", + List.of(Prerequisites.requireCapabilities(requiredCapabilities)), + "required", + emptyList() + ); + + var context = mock(ClientYamlTestExecutionContext.class); + + // when the capabilities API is unavailable: + assertTrue(section.skipCriteriaMet(context)); // always skip if unavailable + assertFalse(section.requiresCriteriaMet(context)); // always fail requirements / skip if unavailable + + when(context.clusterHasCapabilities(anyString(), anyString(), any(), any())).thenReturn(Optional.of(FALSE)); + assertFalse(section.skipCriteriaMet(context)); + 
assertFalse(section.requiresCriteriaMet(context)); + + when(context.clusterHasCapabilities("GET", "/s", null, "c1,c2")).thenReturn(Optional.of(TRUE)); + assertTrue(section.skipCriteriaMet(context)); + + when(context.clusterHasCapabilities("GET", "/r", null, null)).thenReturn(Optional.of(TRUE)); + assertFalse(section.requiresCriteriaMet(context)); + + when(context.clusterHasCapabilities("GET", "/r", "p1", null)).thenReturn(Optional.of(TRUE)); + assertTrue(section.requiresCriteriaMet(context)); + } + public void evaluateEmpty() { var section = new PrerequisiteSection(List.of(), "unsupported", List.of(), "required", List.of()); diff --git a/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@settings.yaml b/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@settings.yaml index 3ca15224dafc4..75671948de11a 100644 --- a/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@settings.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@settings.yaml @@ -8,3 +8,7 @@ template: sort: field: "@timestamp" order: desc + mapping: + ignore_malformed: true + total_fields: + ignore_dynamic_beyond_limit: true diff --git a/x-pack/plugin/apm-data/src/main/resources/component-templates/metrics-apm@settings.yaml b/x-pack/plugin/apm-data/src/main/resources/component-templates/metrics-apm@settings.yaml index e6c84b6ed06f9..819d5d7eafb8e 100644 --- a/x-pack/plugin/apm-data/src/main/resources/component-templates/metrics-apm@settings.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/component-templates/metrics-apm@settings.yaml @@ -6,3 +6,9 @@ _meta: template: settings: codec: best_compression + mapping: + # apm@settings sets `ignore_malformed: true`, but we need + # to disable this for metrics since they use synthetic source, + # and this combination is incompatible with the + # aggregate_metric_double field type. 
+ ignore_malformed: false diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.app@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.app@template.yaml index 3d9c1490e5a86..6c5d991621315 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.app@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.app@template.yaml @@ -20,5 +20,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: logs-apm.app@default-pipeline final_pipeline: apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.error@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.error@template.yaml index 4adcf125b2df9..6373363774602 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.error@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.error@template.yaml @@ -27,5 +27,5 @@ template: value: error settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: logs-apm.error@default-pipeline final_pipeline: apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.app@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.app@template.yaml index c2233469110f8..a8f3e8a4c99e3 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.app@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.app@template.yaml @@ -22,5 +22,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.app@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.internal@template.yaml 
b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.internal@template.yaml index 3d6d05c58e780..1aa06a361b722 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.internal@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.internal@template.yaml @@ -23,7 +23,7 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.internal@default-pipeline final_pipeline: metrics-apm@pipeline mappings: properties: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.10m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.10m@template.yaml index f234b60b1a6ec..729110457f53e 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.10m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.10m@template.yaml @@ -25,5 +25,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.service_destination@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.1m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.1m@template.yaml index aa4f212532e56..0e18d1cd179ef 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.1m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.1m@template.yaml @@ -24,5 +24,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.service_destination@default-pipeline 
final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.60m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.60m@template.yaml index 9b1a26486f482..d349c62e2255c 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.60m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.60m@template.yaml @@ -25,5 +25,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.service_destination@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.10m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.10m@template.yaml index c37ec93651d9d..f71a4c70abde7 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.10m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.10m@template.yaml @@ -25,5 +25,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.service_summary@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.1m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.1m@template.yaml index 3a99bc8472c66..218fbb2eaac87 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.1m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.1m@template.yaml @@ -24,5 +24,5 @@ 
ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.service_summary@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.60m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.60m@template.yaml index d829967f7eddf..9421b8e2f1fce 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.60m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.60m@template.yaml @@ -25,5 +25,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.service_summary@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.10m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.10m@template.yaml index bc21b35d4777f..5e8b7e94673f4 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.10m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.10m@template.yaml @@ -25,5 +25,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.service_transaction@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.1m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.1m@template.yaml index 87a1e254baea7..c51bd79c6513d 100644 --- 
a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.1m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.1m@template.yaml @@ -24,5 +24,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.service_transaction@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.60m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.60m@template.yaml index b45ce0ec0fad7..22e56fd7cabca 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.60m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.60m@template.yaml @@ -25,5 +25,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.service_transaction@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.10m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.10m@template.yaml index 51d3c90cb4af8..6b4102bb673b8 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.10m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.10m@template.yaml @@ -25,5 +25,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.transaction@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.1m@template.yaml 
b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.1m@template.yaml index 8825a93db28dc..7b10125fbce99 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.1m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.1m@template.yaml @@ -24,5 +24,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.transaction@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.60m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.60m@template.yaml index e6657fbfe5d28..62359a8729f08 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.60m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.60m@template.yaml @@ -25,5 +25,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.transaction@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm.rum@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm.rum@template.yaml index 174aec8c5515a..4f4d9a6a7e7d8 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm.rum@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm.rum@template.yaml @@ -23,7 +23,7 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: traces-apm.rum@default-pipeline final_pipeline: traces-apm@pipeline mappings: properties: diff --git 
a/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm@template.yaml index de9c47dfd3f1b..e5c2ef8d57471 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm@template.yaml @@ -22,7 +22,7 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: traces-apm@default-pipeline final_pipeline: traces-apm@pipeline mappings: properties: diff --git a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/apm@default-pipeline.yaml b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/apm@default-pipeline.yaml deleted file mode 100644 index 65d8840e8f713..0000000000000 --- a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/apm@default-pipeline.yaml +++ /dev/null @@ -1,56 +0,0 @@ ---- -version: ${xpack.apmdata.template.version} -_meta: - managed: true -description: | - Built-in default ingest pipeline for all APM data streams. - - This pipeline exists purely for routing, which cannot be - performed in a final pipeline, and for invoking user-defined - custom pipelines. All built-in processing occurs in the final - pipelines. -processors: - # Older versions of apm-server write various metrics to the - # metrics-apm.internal data stream, which newer versions break - # into separate datasets. We reroute these metrics coming from - # older versions of apm-server based on 'metricset.name'. 
-- set: - if: | - (ctx.data_stream?.dataset == 'apm.internal' || ctx['data_stream.dataset'] == 'apm.internal') && - (ctx.metricset?.name == 'transaction' || ctx.metricset?.name == 'service_destination') - field: metricset.interval - value: 1m - override: false -- reroute: - if: | - (ctx.data_stream?.dataset == 'apm.internal' || ctx['data_stream.dataset'] == 'apm.internal') && - (ctx.metricset?.name == 'transaction') - dataset: apm.transaction.1m -- reroute: - if: | - (ctx.data_stream?.dataset == 'apm.internal' || ctx['data_stream.dataset'] == 'apm.internal') && - (ctx.metricset?.name == 'service_destination') - dataset: apm.service_destination.1m - -# Invoke user-defined custom pipelines, in ascending order of specificity: -- pipeline: - name: global@custom - ignore_missing_pipeline: true -- pipeline: - name: "{{{data_stream.type}}}@custom" - ignore_missing_pipeline: true -- pipeline: - if: "ctx?.data_stream?.dataset != 'apm'" - name: "{{{data_stream.type}}}-apm@custom" - ignore_missing_pipeline: true -- pipeline: - # (logs|metrics)-apm.app.-* should invoke (logs|metrics)-apm.app@custom, - # i.e. excluding service.name from the dataset. - if: "ctx.data_stream?.dataset != null && ctx.data_stream?.dataset.startsWith('apm.app.')" - name: "{{{data_stream.type}}}-apm.app@custom" - ignore_missing_pipeline: true -- pipeline: - # other data streams should include the whole dataset. 
- if: "ctx.data_stream?.dataset != null && !ctx.data_stream?.dataset.startsWith('apm.app.')" - name: "{{{data_stream.type}}}-{{{data_stream.dataset}}}@custom" - ignore_missing_pipeline: true diff --git a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/logs-apm.app@default-pipeline.yaml b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/logs-apm.app@default-pipeline.yaml new file mode 100644 index 0000000000000..a1f9565676fd4 --- /dev/null +++ b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/logs-apm.app@default-pipeline.yaml @@ -0,0 +1,22 @@ +--- +version: ${xpack.apmdata.template.version} +_meta: + managed: true +description: | + Built-in default ingest pipeline for logs-apm.app.*-* data streams. + + This pipeline exists purely for routing, which cannot be + performed in a final pipeline, and for invoking user-defined + custom pipelines. All built-in processing occurs in the final + pipelines. +processors: +# Invoke user-defined custom pipelines, in ascending order of specificity: +- pipeline: + name: global@custom + ignore_missing_pipeline: true +- pipeline: + name: logs@custom + ignore_missing_pipeline: true +- pipeline: + name: logs-apm.app@custom + ignore_missing_pipeline: true diff --git a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/logs-apm.error@default-pipeline.yaml b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/logs-apm.error@default-pipeline.yaml new file mode 100644 index 0000000000000..c46a1c1b44f96 --- /dev/null +++ b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/logs-apm.error@default-pipeline.yaml @@ -0,0 +1,22 @@ +--- +version: ${xpack.apmdata.template.version} +_meta: + managed: true +description: | + Built-in default ingest pipeline for logs-apm.error-* data streams. + + This pipeline exists purely for routing, which cannot be + performed in a final pipeline, and for invoking user-defined + custom pipelines. All built-in processing occurs in the final + pipelines. 
+processors: +# Invoke user-defined custom pipelines, in ascending order of specificity: +- pipeline: + name: global@custom + ignore_missing_pipeline: true +- pipeline: + name: logs@custom + ignore_missing_pipeline: true +- pipeline: + name: logs-apm.error@custom + ignore_missing_pipeline: true diff --git a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.app@default-pipeline.yaml b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.app@default-pipeline.yaml new file mode 100644 index 0000000000000..bc07840727cca --- /dev/null +++ b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.app@default-pipeline.yaml @@ -0,0 +1,22 @@ +--- +version: ${xpack.apmdata.template.version} +_meta: + managed: true +description: | + Built-in default ingest pipeline for metrics-apm.app.*-* data streams. + + This pipeline exists purely for routing, which cannot be + performed in a final pipeline, and for invoking user-defined + custom pipelines. All built-in processing occurs in the final + pipelines. +processors: +# Invoke user-defined custom pipelines, in ascending order of specificity: +- pipeline: + name: global@custom + ignore_missing_pipeline: true +- pipeline: + name: metrics@custom + ignore_missing_pipeline: true +- pipeline: + name: metrics-apm.app@custom + ignore_missing_pipeline: true diff --git a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.internal@default-pipeline.yaml b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.internal@default-pipeline.yaml new file mode 100644 index 0000000000000..247ee4cae67f0 --- /dev/null +++ b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.internal@default-pipeline.yaml @@ -0,0 +1,38 @@ +--- +version: ${xpack.apmdata.template.version} +_meta: + managed: true +description: | + Built-in default ingest pipeline for metrics-apm.internal-* data streams. 
+ + This pipeline exists purely for routing, which cannot be + performed in a final pipeline, and for invoking user-defined + custom pipelines. All built-in processing occurs in the final + pipelines. +processors: + # Older versions of apm-server write various metrics to the + # metrics-apm.internal data stream, which newer versions break + # into separate datasets. We reroute these metrics coming from + # older versions of apm-server based on 'metricset.name'. +- set: + if: "ctx.metricset?.name == 'transaction' || ctx.metricset?.name == 'service_destination'" + field: metricset.interval + value: 1m + override: false +- reroute: + if: "ctx.metricset?.name == 'transaction'" + dataset: apm.transaction.1m +- reroute: + if: "ctx.metricset?.name == 'service_destination'" + dataset: apm.service_destination.1m + +# Invoke user-defined custom pipelines, in ascending order of specificity: +- pipeline: + name: global@custom + ignore_missing_pipeline: true +- pipeline: + name: metrics@custom + ignore_missing_pipeline: true +- pipeline: + name: metrics-apm.internal@custom + ignore_missing_pipeline: true diff --git a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.service_destination@default-pipeline.yaml b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.service_destination@default-pipeline.yaml new file mode 100644 index 0000000000000..d8912fc2dd220 --- /dev/null +++ b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.service_destination@default-pipeline.yaml @@ -0,0 +1,23 @@ +--- +version: ${xpack.apmdata.template.version} +_meta: + managed: true +description: | + Built-in default ingest pipeline for metrics-apm.service_destination.*-* + data streams. + + This pipeline exists purely for routing, which cannot be + performed in a final pipeline, and for invoking user-defined + custom pipelines. All built-in processing occurs in the final + pipelines. 
+processors: +# Invoke user-defined custom pipelines, in ascending order of specificity: +- pipeline: + name: global@custom + ignore_missing_pipeline: true +- pipeline: + name: metrics@custom + ignore_missing_pipeline: true +- pipeline: + name: metrics-apm.service_destination@custom + ignore_missing_pipeline: true diff --git a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.service_summary@default-pipeline.yaml b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.service_summary@default-pipeline.yaml new file mode 100644 index 0000000000000..4cf5652e46bf4 --- /dev/null +++ b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.service_summary@default-pipeline.yaml @@ -0,0 +1,23 @@ +--- +version: ${xpack.apmdata.template.version} +_meta: + managed: true +description: | + Built-in default ingest pipeline for metrics-apm.service_summary.*-* + data streams. + + This pipeline exists purely for routing, which cannot be + performed in a final pipeline, and for invoking user-defined + custom pipelines. All built-in processing occurs in the final + pipelines. 
+processors: +# Invoke user-defined custom pipelines, in ascending order of specificity: +- pipeline: + name: global@custom + ignore_missing_pipeline: true +- pipeline: + name: metrics@custom + ignore_missing_pipeline: true +- pipeline: + name: metrics-apm.service_summary@custom + ignore_missing_pipeline: true diff --git a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.service_transaction@default-pipeline.yaml b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.service_transaction@default-pipeline.yaml new file mode 100644 index 0000000000000..44ab85998cee7 --- /dev/null +++ b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.service_transaction@default-pipeline.yaml @@ -0,0 +1,23 @@ +--- +version: ${xpack.apmdata.template.version} +_meta: + managed: true +description: | + Built-in default ingest pipeline for metrics-apm.service_transaction.*-* + data streams. + + This pipeline exists purely for routing, which cannot be + performed in a final pipeline, and for invoking user-defined + custom pipelines. All built-in processing occurs in the final + pipelines. 
+processors: +# Invoke user-defined custom pipelines, in ascending order of specificity: +- pipeline: + name: "global@custom" + ignore_missing_pipeline: true +- pipeline: + name: "metrics@custom" + ignore_missing_pipeline: true +- pipeline: + name: "metrics-apm.service_transaction@custom" + ignore_missing_pipeline: true diff --git a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.transaction@default-pipeline.yaml b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.transaction@default-pipeline.yaml new file mode 100644 index 0000000000000..12e58e6747b5a --- /dev/null +++ b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.transaction@default-pipeline.yaml @@ -0,0 +1,23 @@ +--- +version: ${xpack.apmdata.template.version} +_meta: + managed: true +description: | + Built-in default ingest pipeline for metrics-apm.transaction.*-* + data streams. + + This pipeline exists purely for routing, which cannot be + performed in a final pipeline, and for invoking user-defined + custom pipelines. All built-in processing occurs in the final + pipelines. +processors: +# Invoke user-defined custom pipelines, in ascending order of specificity: +- pipeline: + name: global@custom + ignore_missing_pipeline: true +- pipeline: + name: metrics@custom + ignore_missing_pipeline: true +- pipeline: + name: metrics-apm.transaction@custom + ignore_missing_pipeline: true diff --git a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/traces-apm.rum@default-pipeline.yaml b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/traces-apm.rum@default-pipeline.yaml new file mode 100644 index 0000000000000..b1ce73308c5bc --- /dev/null +++ b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/traces-apm.rum@default-pipeline.yaml @@ -0,0 +1,22 @@ +--- +version: ${xpack.apmdata.template.version} +_meta: + managed: true +description: | + Built-in default ingest pipeline for traces-apm.rum-* data streams. 
+ + This pipeline exists purely for routing, which cannot be + performed in a final pipeline, and for invoking user-defined + custom pipelines. All built-in processing occurs in the final + pipeline. +processors: +# Invoke user-defined custom pipelines, in ascending order of specificity: +- pipeline: + name: global@custom + ignore_missing_pipeline: true +- pipeline: + name: traces@custom + ignore_missing_pipeline: true +- pipeline: + name: traces-apm.rum@custom + ignore_missing_pipeline: true diff --git a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/traces-apm@default-pipeline.yaml b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/traces-apm@default-pipeline.yaml new file mode 100644 index 0000000000000..039b6dccf7d57 --- /dev/null +++ b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/traces-apm@default-pipeline.yaml @@ -0,0 +1,22 @@ +--- +version: ${xpack.apmdata.template.version} +_meta: + managed: true +description: | + Built-in default ingest pipeline for traces-apm-* data streams. + + This pipeline exists purely for routing, which cannot be + performed in a final pipeline, and for invoking user-defined + custom pipelines. All built-in processing occurs in the final + pipeline. +processors: +# Invoke user-defined custom pipelines, in ascending order of specificity: +- pipeline: + name: global@custom + ignore_missing_pipeline: true +- pipeline: + name: traces@custom + ignore_missing_pipeline: true +- pipeline: + name: traces-apm@custom + ignore_missing_pipeline: true diff --git a/x-pack/plugin/apm-data/src/main/resources/resources.yaml b/x-pack/plugin/apm-data/src/main/resources/resources.yaml index 2f2025c37f70f..772057d4931a3 100644 --- a/x-pack/plugin/apm-data/src/main/resources/resources.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/resources.yaml @@ -1,7 +1,7 @@ # "version" holds the version of the templates and ingest pipelines installed # by xpack-plugin apm-data. 
This must be increased whenever an existing template or # pipeline is changed, in order for it to be updated on Elasticsearch upgrade. -version: 2 +version: 4 component-templates: # Data lifecycle. @@ -49,7 +49,27 @@ index-templates: # Ingest pipeline configuration requires to manually specify pipeline dependencies ingest-pipelines: - - apm@default-pipeline: {} + # Default pipelines. + # + # Each data stream index template gets its own default pipeline, + # with the exception of the interval data streams which share one + # for all intervals of the same metric, and the sampled traces + # data stream which does not have (or need) one. + - logs-apm.app@default-pipeline: {} + - logs-apm.error@default-pipeline: {} + - metrics-apm.app@default-pipeline: {} + - metrics-apm.internal@default-pipeline: + dependencies: + - metrics-apm.service_destination@default-pipeline + - metrics-apm.transaction@default-pipeline + - metrics-apm.service_destination@default-pipeline: {} + - metrics-apm.service_summary@default-pipeline: {} + - metrics-apm.service_transaction@default-pipeline: {} + - metrics-apm.transaction@default-pipeline: {} + - traces-apm@default-pipeline: {} + - traces-apm.rum@default-pipeline: {} + + # Final pipelines. 
- apm@pipeline: {} - traces-apm@pipeline: dependencies: diff --git a/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java b/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java index 4f6a5b58ff38d..8228d7011c9c1 100644 --- a/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java +++ b/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java @@ -322,6 +322,36 @@ public void testIndexTemplateConventions() throws Exception { .filter(t -> t.endsWith("@custom")) .toList(); assertThat(requiredCustomComponentTemplates, empty()); + + final Settings settings = template.template().settings(); + if (namePrefix.equals("traces-apm.sampled")) { + // traces-apm.sampled does not have any ingest pipelines. + assertThat(settings, equalTo(null)); + } else { + final boolean isIntervalDataStream = dataStreamType.equals("metrics") && namePrefix.matches(".*\\.[0-9]+m"); + final String defaultPipeline = settings.get("index.default_pipeline"); + if (isIntervalDataStream) { + // e.g. metrics-apm.service_transaction.10m should call + // metrics-apm.service_transaction@default-pipeline + final String withoutInterval = namePrefix.substring(0, namePrefix.lastIndexOf('.')); + assertThat(defaultPipeline, equalTo(withoutInterval + "@default-pipeline")); + } else { + // All other data streams should call a default pipeline + // specific to the data stream. 
+ assertThat(defaultPipeline, equalTo(namePrefix + "@default-pipeline")); + } + + final String finalPipeline = settings.get("index.final_pipeline"); + switch (dataStreamType) { + case "metrics", "traces": + assertThat(finalPipeline, equalTo(dataStreamType + "-apm@pipeline")); + break; + default: + assertThat(finalPipeline, equalTo("apm@pipeline")); + break; + } + } } } diff --git a/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_custom_pipelines.yml b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_custom_pipelines.yml index 8a039e7b4eb1d..339b3b56462ac 100644 --- a/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_custom_pipelines.yml +++ b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_custom_pipelines.yml @@ -22,6 +22,51 @@ setup: ] } + + - do: + ingest.put_pipeline: + id: "logs@custom" + body: > + { + "processors": [ + { + "set" : { + "field": "custom_pipelines", + "value": "{{{custom_pipelines}}},{{{_ingest.pipeline}}}" + } + } + ] + } + + - do: + ingest.put_pipeline: + id: "logs-apm.app@custom" + body: > + { + "processors": [ + { + "set" : { + "field": "custom_pipelines", + "value": "{{{custom_pipelines}}},{{{_ingest.pipeline}}}" + } + } + ] + } + + - do: + ingest.put_pipeline: + id: "logs-apm.error@custom" + body: > + { + "processors": [ + { + "set" : { + "field": "custom_pipelines", + "value": "{{{custom_pipelines}}},{{{_ingest.pipeline}}}" + } + } + ] + } + - do: ingest.put_pipeline: id: "metrics@custom" body: > { "processors": [ { "set" : { "field": "custom_pipelines", "value": "{{{custom_pipelines}}},{{{_ingest.pipeline}}}" } } ] } @@ -39,7 +84,7 @@ setup: - do: ingest.put_pipeline: - id: "metrics-apm@custom" + id: "metrics-apm.internal@custom" body: > { "processors": [ @@ -67,6 +112,66 @@ setup: ] } + + - do: + ingest.put_pipeline: + id: "metrics-apm.service_destination@custom" + body: > + { + "processors": [ + { + "set" : { + "field": "custom_pipelines", + "value": "{{{custom_pipelines}}},{{{_ingest.pipeline}}}" + } + } + ] + } + + - do: + ingest.put_pipeline: + id:
"metrics-apm.service_summary@custom" + body: > + { + "processors": [ + { + "set" : { + "field": "custom_pipelines", + "value": "{{{custom_pipelines}}},{{{_ingest.pipeline}}}" + } + } + ] + } + + - do: + ingest.put_pipeline: + id: "metrics-apm.service_transaction@custom" + body: > + { + "processors": [ + { + "set" : { + "field": "custom_pipelines", + "value": "{{{custom_pipelines}}},{{{_ingest.pipeline}}}" + } + } + ] + } + + - do: + ingest.put_pipeline: + id: "metrics-apm.transaction@custom" + body: > + { + "processors": [ + { + "set" : { + "field": "custom_pipelines", + "value": "{{{custom_pipelines}}},{{{_ingest.pipeline}}}" + } + } + ] + } + - do: ingest.put_pipeline: id: "traces@custom" @@ -97,42 +202,114 @@ setup: ] } + - do: + ingest.put_pipeline: + id: "traces-apm.rum@custom" + body: > + { + "processors": [ + { + "set" : { + "field": "custom_pipelines", + "value": "{{{custom_pipelines}}},{{{_ingest.pipeline}}}" + } + } + ] + } + --- -"Test metrics @custom ingest pipelines": +"Test logs @custom ingest pipelines": - do: bulk: - index: metrics-apm.app.svc1-testing refresh: true body: - - create: {} - - '{"@timestamp": "2017-06-22", "data_stream": {"type": "metrics", "dataset": "apm.app.svc1", "namespace": "testing"}}' + - create: {"_index": "logs-apm.app.svc1-testing"} + - '{"@timestamp": "2017-06-22", "data_stream": {"type": "logs", "dataset": "apm.app.svc1", "namespace": "testing"}}' + - create: {"_index": "logs-apm.error-testing"} + - '{"@timestamp": "2017-06-22", "data_stream": {"type": "logs", "dataset": "apm.error", "namespace": "testing"}}' - is_false: errors + - do: { search: { index: logs-apm.app.svc1-testing } } + - length: { hits.hits: 1 } + - match: + hits.hits.0._source.custom_pipelines: ",global@custom,logs@custom,logs-apm.app@custom" + + - do: { search: { index: logs-apm.error-testing } } + - length: { hits.hits: 1 } + - match: + hits.hits.0._source.custom_pipelines: ",global@custom,logs@custom,logs-apm.error@custom" + +--- +"Test metrics @custom 
ingest pipelines": - do: - search: - index: metrics-apm.app.svc1-testing + bulk: + refresh: true body: - fields: ["custom_pipelines"] + - create: {"_index": "metrics-apm.app.svc1-testing"} + - '{"@timestamp": "2017-06-22", "data_stream": {"type": "metrics", "dataset": "apm.app.svc1", "namespace": "testing"}}' + - create: {"_index": "metrics-apm.internal-testing"} + - '{"@timestamp": "2017-06-22", "data_stream": {"type": "metrics", "dataset": "apm.internal", "namespace": "testing"}}' + - create: {"_index": "metrics-apm.service_destination.1m-testing"} + - '{"@timestamp": "2017-06-22", "data_stream": {"type": "metrics", "dataset": "apm.service_destination.1m", "namespace": "testing"}}' + - create: {"_index": "metrics-apm.service_summary.1m-testing"} + - '{"@timestamp": "2017-06-22", "data_stream": {"type": "metrics", "dataset": "apm.service_summary.1m", "namespace": "testing"}}' + - create: {"_index": "metrics-apm.service_transaction.1m-testing"} + - '{"@timestamp": "2017-06-22", "data_stream": {"type": "metrics", "dataset": "apm.service_transaction.1m", "namespace": "testing"}}' + - create: {"_index": "metrics-apm.transaction.1m-testing"} + - '{"@timestamp": "2017-06-22", "data_stream": {"type": "metrics", "dataset": "apm.transaction.1m", "namespace": "testing"}}' + + - is_false: errors + + - do: { search: { index: metrics-apm.app.svc1-testing } } + - length: { hits.hits: 1 } + - match: + hits.hits.0._source.custom_pipelines: ",global@custom,metrics@custom,metrics-apm.app@custom" + + - do: { search: { index: metrics-apm.internal-testing } } + - length: { hits.hits: 1 } + - match: + hits.hits.0._source.custom_pipelines: ",global@custom,metrics@custom,metrics-apm.internal@custom" + + - do: { search: { index: metrics-apm.service_destination.1m-testing } } + - length: { hits.hits: 1 } + - match: + hits.hits.0._source.custom_pipelines: ",global@custom,metrics@custom,metrics-apm.service_destination@custom" + + - do: { search: { index: 
metrics-apm.service_summary.1m-testing } } + - length: { hits.hits: 1 } + - match: + hits.hits.0._source.custom_pipelines: ",global@custom,metrics@custom,metrics-apm.service_summary@custom" + + - do: { search: { index: metrics-apm.service_transaction.1m-testing } } + - length: { hits.hits: 1 } + - match: + hits.hits.0._source.custom_pipelines: ",global@custom,metrics@custom,metrics-apm.service_transaction@custom" + + - do: { search: { index: metrics-apm.transaction.1m-testing } } - length: { hits.hits: 1 } - match: - hits.hits.0._source.custom_pipelines: ",global@custom,metrics@custom,metrics-apm@custom,metrics-apm.app@custom" + hits.hits.0._source.custom_pipelines: ",global@custom,metrics@custom,metrics-apm.transaction@custom" --- "Test traces @custom ingest pipelines": - do: bulk: - index: traces-apm-testing refresh: true body: - - create: {} + - create: {"_index": "traces-apm-testing"} - '{"@timestamp": "2017-06-22", "data_stream": {"type": "traces", "dataset": "apm", "namespace": "testing"}}' + - create: {"_index": "traces-apm.rum-testing"} + - '{"@timestamp": "2017-06-22", "data_stream": {"type": "traces", "dataset": "apm.rum", "namespace": "testing"}}' - is_false: errors - - do: - search: - index: traces-apm-testing + - do: { search: { index: traces-apm-testing } } - length: { hits.hits: 1 } - match: hits.hits.0._source.custom_pipelines: ",global@custom,traces@custom,traces-apm@custom" + + - do: { search: { index: traces-apm.rum-testing } } + - length: { hits.hits: 1 } + - match: + hits.hits.0._source.custom_pipelines: ",global@custom,traces@custom,traces-apm.rum@custom" diff --git a/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_lenient_mappings.yml b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_lenient_mappings.yml new file mode 100644 index 0000000000000..97265a9b81a75 --- /dev/null +++ b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_lenient_mappings.yml @@ -0,0 +1,100 @@ +--- 
+setup: + - do: + cluster.health: + wait_for_events: languid + + - do: + cluster.put_component_template: + name: "logs-apm.app@custom" + body: + template: + settings: + mapping: + total_fields: + limit: 20 + +--- +"Test ignore_malformed": + - do: + bulk: + index: traces-apm-testing + refresh: true + body: + # Passing a (non-coercable) string into a numeric field should not + # cause an indexing failure; it should just not be indexed. + - create: {} + - '{"@timestamp": "2017-06-22", "numeric_labels": {"key": "string"}}' + - create: {} + - '{"@timestamp": "2017-06-22", "numeric_labels": {"key": 123}}' + + - is_false: errors + + - do: + search: + index: traces-apm-testing + body: + fields: ["numeric_labels.*", "_ignored"] + - length: { hits.hits: 2 } + - match: { hits.hits.0.fields: {"_ignored": ["numeric_labels.key"]} } + - match: { hits.hits.1.fields: {"numeric_labels.key": [123.0]} } + +--- +"Test ignore_dynamic_beyond_limit": + - do: + bulk: + index: logs-apm.app.svc1-testing + refresh: true + body: + - create: {} + - {"@timestamp": "2017-06-22", "k1": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k2": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k3": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k4": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k5": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k6": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k7": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k8": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k9": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k10": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k11": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k12": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k13": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k14": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k15": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k16": ""} + - create: {} + - {"@timestamp": "2017-06-22", 
"k17": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k18": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k19": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k20": ""} + + - is_false: errors + + - do: + search: + index: logs-apm.app.svc1-testing + body: + query: + term: + _ignored: + value: k20 + - length: { hits.hits: 1 } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/GetAsyncStatusRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/GetAsyncStatusRequest.java index 9a623ede96f02..3581b9db19887 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/GetAsyncStatusRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/GetAsyncStatusRequest.java @@ -34,7 +34,7 @@ public GetAsyncStatusRequest(String id) { public GetAsyncStatusRequest(StreamInput in) throws IOException { super(in); this.id = in.readString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.ASYNC_SEARCH_STATUS_SUPPORTS_KEEP_ALIVE)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { this.keepAlive = in.readTimeValue(); } } @@ -43,7 +43,7 @@ public GetAsyncStatusRequest(StreamInput in) throws IOException { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(id); - if (out.getTransportVersion().onOrAfter(TransportVersions.ASYNC_SEARCH_STATUS_SUPPORTS_KEEP_ALIVE)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeTimeValue(keepAlive); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/NodesDataTiersUsageTransportAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/NodesDataTiersUsageTransportAction.java index 08a2d5ae4f5b4..ee721d9d55714 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/NodesDataTiersUsageTransportAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/NodesDataTiersUsageTransportAction.java @@ -90,7 +90,7 @@ protected NodesResponse newResponse(NodesRequest request, List remoteIndicesPrivileges = new ArrayList<>(); private RemoteClusterPermissions remoteClusterPermissions = RemoteClusterPermissions.NONE; private boolean restrictRequest = false; + private String description; public PutRoleRequest() {} @@ -63,6 +64,10 @@ public void name(String name) { this.name = name; } + public void description(String description) { + this.description = description; + } + public void cluster(String... clusterPrivilegesArray) { this.clusterPrivileges = clusterPrivilegesArray; } @@ -164,6 +169,10 @@ public String name() { return name; } + public String description() { + return description; + } + public String[] cluster() { return clusterPrivileges; } @@ -213,7 +222,8 @@ public RoleDescriptor roleDescriptor() { Collections.emptyMap(), remoteIndicesPrivileges.toArray(new RoleDescriptor.RemoteIndicesPrivileges[0]), remoteClusterPermissions, - null + null, + description ); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestBuilder.java index daf485814c799..486a347775264 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestBuilder.java @@ -21,7 +21,7 @@ */ public class PutRoleRequestBuilder extends ActionRequestBuilder { - private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder().build(); + private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder().allowDescription(true).build(); public PutRoleRequestBuilder(ElasticsearchClient client) { 
super(client, PutRoleAction.INSTANCE, new PutRoleRequest()); @@ -43,6 +43,7 @@ public PutRoleRequestBuilder source(String name, BytesReference source, XContent request.addApplicationPrivileges(descriptor.getApplicationPrivileges()); request.runAs(descriptor.getRunAs()); request.metadata(descriptor.getMetadata()); + request.description(descriptor.getDescription()); return this; } @@ -51,6 +52,11 @@ public PutRoleRequestBuilder name(String name) { return this; } + public PutRoleRequestBuilder description(String description) { + request.description(description); + return this; + } + public PutRoleRequestBuilder cluster(String... cluster) { request.cluster(cluster); return this; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/RoleDescriptorRequestValidator.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/RoleDescriptorRequestValidator.java index 472faee97a707..ec8fcd1c421ef 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/RoleDescriptorRequestValidator.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/RoleDescriptorRequestValidator.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; import org.elasticsearch.xpack.core.security.authz.restriction.WorkflowResolver; import org.elasticsearch.xpack.core.security.support.MetadataUtils; +import org.elasticsearch.xpack.core.security.support.Validation; import java.util.Arrays; import java.util.Set; @@ -102,6 +103,12 @@ public static ActionRequestValidationException validate( } } } + if (roleDescriptor.hasDescription()) { + Validation.Error error = Validation.Roles.validateRoleDescription(roleDescriptor.getDescription()); + if (error != null) { + validationException = addValidationError(error.toString(), validationException); + } + } return validationException; } } diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequest.java index f85ca260c3fff..039ed8aa5fb64 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequest.java @@ -166,4 +166,16 @@ public void writeTo(StreamOutput out) throws IOException { public ExpressionRoleMapping getMapping() { return new ExpressionRoleMapping(name, rules, roles, roleTemplates, metadata, enabled); } + + public static PutRoleMappingRequest fromMapping(ExpressionRoleMapping mapping) { + var request = new PutRoleMappingRequest(); + request.setName(mapping.getName()); + request.setEnabled(mapping.isEnabled()); + request.setRoles(mapping.getRoles()); + request.setRoleTemplates(mapping.getRoleTemplates()); + request.setRules(mapping.getExpression()); + request.setMetadata(mapping.getMetadata()); + + return request; + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequestBuilder.java index d46c21f080308..88a930063190b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequestBuilder.java @@ -9,7 +9,8 @@ import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.support.WriteRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; -import org.elasticsearch.xcontent.XContentParser; +import 
org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; import org.elasticsearch.xpack.core.security.authc.support.mapper.TemplateRoleName; import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.RoleMapperExpression; @@ -34,8 +35,8 @@ public PutRoleMappingRequestBuilder(ElasticsearchClient client) { /** * Populate the put role request from the source and the role's name */ - public PutRoleMappingRequestBuilder source(String name, XContentParser parser) throws IOException { - ExpressionRoleMapping mapping = ExpressionRoleMapping.parse(name, parser); + public PutRoleMappingRequestBuilder source(String name, BytesReference source, XContentType xContentType) throws IOException { + ExpressionRoleMapping mapping = ExpressionRoleMapping.parse(name, source, xContentType); request.setName(name); request.setEnabled(mapping.isEnabled()); request.setRoles(mapping.getRoles()); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/CrossClusterAccessSubjectInfo.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/CrossClusterAccessSubjectInfo.java index f91df320bb92d..82bfc4b4a0dd4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/CrossClusterAccessSubjectInfo.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/CrossClusterAccessSubjectInfo.java @@ -224,7 +224,10 @@ public static final class RoleDescriptorsBytes implements Writeable { public static final RoleDescriptorsBytes EMPTY = new RoleDescriptorsBytes(new BytesArray("{}")); - private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder().build(); + private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder() + .allowRestriction(true) + .allowDescription(true) 
+ .build(); private final BytesReference rawBytes; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java index caa5567364cd3..1dc293f929121 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java @@ -49,6 +49,8 @@ import java.util.Map; import java.util.Objects; +import static org.elasticsearch.common.xcontent.XContentHelper.createParserNotCompressed; + /** * A holder for a Role that contains user-readable information about the Role * without containing the actual Role object. @@ -70,6 +72,7 @@ public class RoleDescriptor implements ToXContentObject, Writeable { private final Restriction restriction; private final Map metadata; private final Map transientMetadata; + private final String description; /** * Needed as a stop-gap measure because {@link FieldPermissionsCache} has state (settings) but we need to use one @@ -93,7 +96,7 @@ public RoleDescriptor( /** * @deprecated Use {@link #RoleDescriptor(String, String[], IndicesPrivileges[], ApplicationResourcePrivileges[], - * ConfigurableClusterPrivilege[], String[], Map, Map, RemoteIndicesPrivileges[], RemoteClusterPermissions, Restriction)} + * ConfigurableClusterPrivilege[], String[], Map, Map, RemoteIndicesPrivileges[], RemoteClusterPermissions, Restriction, String)} */ @Deprecated public RoleDescriptor( @@ -108,7 +111,7 @@ public RoleDescriptor( /** * @deprecated Use {@link #RoleDescriptor(String, String[], IndicesPrivileges[], ApplicationResourcePrivileges[], - * ConfigurableClusterPrivilege[], String[], Map, Map, RemoteIndicesPrivileges[], RemoteClusterPermissions, Restriction)} + * ConfigurableClusterPrivilege[], String[], Map, Map, RemoteIndicesPrivileges[], RemoteClusterPermissions, Restriction, 
String)} */ @Deprecated public RoleDescriptor( @@ -130,7 +133,8 @@ public RoleDescriptor( transientMetadata, RemoteIndicesPrivileges.NONE, RemoteClusterPermissions.NONE, - Restriction.NONE + Restriction.NONE, + null ); } @@ -155,7 +159,8 @@ public RoleDescriptor( transientMetadata, RemoteIndicesPrivileges.NONE, RemoteClusterPermissions.NONE, - Restriction.NONE + Restriction.NONE, + null ); } @@ -170,7 +175,8 @@ public RoleDescriptor( @Nullable Map transientMetadata, @Nullable RemoteIndicesPrivileges[] remoteIndicesPrivileges, @Nullable RemoteClusterPermissions remoteClusterPermissions, - @Nullable Restriction restriction + @Nullable Restriction restriction, + @Nullable String description ) { this.name = name; this.clusterPrivileges = clusterPrivileges != null ? clusterPrivileges : Strings.EMPTY_ARRAY; @@ -187,6 +193,7 @@ public RoleDescriptor( ? remoteClusterPermissions : RemoteClusterPermissions.NONE; this.restriction = restriction != null ? restriction : Restriction.NONE; + this.description = description != null ? 
description : ""; } public RoleDescriptor(StreamInput in) throws IOException { @@ -218,12 +225,21 @@ public RoleDescriptor(StreamInput in) throws IOException { } else { this.remoteClusterPermissions = RemoteClusterPermissions.NONE; } + if (in.getTransportVersion().onOrAfter(TransportVersions.SECURITY_ROLE_DESCRIPTION)) { + this.description = in.readOptionalString(); + } else { + this.description = ""; + } } public String getName() { return this.name; } + public String getDescription() { + return description; + } + public String[] getClusterPrivileges() { return this.clusterPrivileges; } @@ -272,6 +288,10 @@ public boolean hasRunAs() { return runAs.length != 0; } + public boolean hasDescription() { + return description.length() != 0; + } + public boolean hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster() { return hasConfigurableClusterPrivileges() || hasApplicationPrivileges() @@ -338,6 +358,7 @@ public String toString() { sb.append(group.toString()).append(","); } sb.append("], restriction=").append(restriction); + sb.append(", description=").append(description); sb.append("]"); return sb.toString(); } @@ -358,7 +379,8 @@ public boolean equals(Object o) { if (Arrays.equals(runAs, that.runAs) == false) return false; if (Arrays.equals(remoteIndicesPrivileges, that.remoteIndicesPrivileges) == false) return false; if (remoteClusterPermissions.equals(that.remoteClusterPermissions) == false) return false; - return restriction.equals(that.restriction); + if (restriction.equals(that.restriction) == false) return false; + return Objects.equals(description, that.description); } @Override @@ -373,6 +395,7 @@ public int hashCode() { result = 31 * result + Arrays.hashCode(remoteIndicesPrivileges); result = 31 * result + remoteClusterPermissions.hashCode(); result = 31 * result + restriction.hashCode(); + result = 31 * result + Objects.hashCode(description); return result; } @@ -431,6 +454,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params, 
boolea if (hasRestriction()) { builder.field(Fields.RESTRICTION.getPreferredName(), restriction); } + if (hasDescription()) { + builder.field(Fields.DESCRIPTION.getPreferredName(), description); + } return builder.endObject(); } @@ -456,17 +482,22 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.ROLE_REMOTE_CLUSTER_PRIVS)) { remoteClusterPermissions.writeTo(out); } + if (out.getTransportVersion().onOrAfter(TransportVersions.SECURITY_ROLE_DESCRIPTION)) { + out.writeOptionalString(description); + } } public static Parser.Builder parserBuilder() { return new Parser.Builder(); } - public record Parser(boolean allow2xFormat, boolean allowRestriction) { + public record Parser(boolean allow2xFormat, boolean allowRestriction, boolean allowDescription) { public static final class Builder { + private boolean allow2xFormat = false; private boolean allowRestriction = false; + private boolean allowDescription = false; private Builder() {} @@ -480,8 +511,13 @@ public Builder allowRestriction(boolean allowRestriction) { return this; } + public Builder allowDescription(boolean allowDescription) { + this.allowDescription = allowDescription; + return this; + } + public Parser build() { - return new Parser(allow2xFormat, allowRestriction); + return new Parser(allow2xFormat, allowRestriction, allowDescription); } } @@ -565,6 +601,8 @@ public RoleDescriptor parse(String name, XContentParser parser) throws IOExcepti remoteClusterPermissions = parseRemoteCluster(name, parser); } else if (allowRestriction && Fields.RESTRICTION.match(currentFieldName, parser.getDeprecationHandler())) { restriction = Restriction.parse(name, parser); + } else if (allowDescription && Fields.DESCRIPTION.match(currentFieldName, parser.getDeprecationHandler())) { + description = parser.text(); } else if (Fields.TYPE.match(currentFieldName, parser.getDeprecationHandler())) { // don't need it } else { @@ -586,7 +624,8 @@ public RoleDescriptor 
parse(String name, XContentParser parser) throws IOExcepti null, remoteIndicesPrivileges, remoteClusterPermissions, - restriction + restriction, + description ); } @@ -686,7 +725,7 @@ public static PrivilegesToCheck parsePrivilegesToCheck( } private static XContentParser createParser(BytesReference source, XContentType xContentType) throws IOException { - return XContentHelper.createParserNotCompressed(LoggingDeprecationHandler.XCONTENT_PARSER_CONFIG, source, xContentType); + return createParserNotCompressed(LoggingDeprecationHandler.XCONTENT_PARSER_CONFIG, source, xContentType); } public static RoleDescriptor.IndicesPrivileges[] parseIndices(String roleName, XContentParser parser, boolean allow2xFormat) @@ -1821,5 +1860,6 @@ public interface Fields { ParseField TYPE = new ParseField("type"); ParseField RESTRICTION = new ParseField("restriction"); ParseField WORKFLOWS = new ParseField("workflows"); + ParseField DESCRIPTION = new ParseField("description"); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorsIntersection.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorsIntersection.java index 446209b1d7ac3..38aa1bc106e99 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorsIntersection.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorsIntersection.java @@ -26,7 +26,10 @@ public record RoleDescriptorsIntersection(Collection> roleDe public static RoleDescriptorsIntersection EMPTY = new RoleDescriptorsIntersection(Collections.emptyList()); - private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder().allowRestriction(true).build(); + private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder() + .allowRestriction(true) + .allowDescription(true) + .build(); public 
RoleDescriptorsIntersection(RoleDescriptor roleDescriptor) { this(List.of(Set.of(roleDescriptor))); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java index 372b62cffeaea..7f927d45a2375 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java @@ -53,6 +53,7 @@ import org.elasticsearch.xpack.core.security.action.user.ProfileHasPrivilegesAction; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.support.Automatons; +import org.elasticsearch.xpack.core.slm.action.GetSLMStatusAction; import org.elasticsearch.xpack.core.slm.action.GetSnapshotLifecycleAction; import java.util.Collection; @@ -165,7 +166,11 @@ public class ClusterPrivilegeResolver { ILMActions.STOP.name(), GetStatusAction.NAME ); - private static final Set READ_SLM_PATTERN = Set.of(GetSnapshotLifecycleAction.NAME, GetStatusAction.NAME); + private static final Set READ_SLM_PATTERN = Set.of( + GetSLMStatusAction.NAME, + GetSnapshotLifecycleAction.NAME, + GetStatusAction.NAME + ); private static final Set MANAGE_SEARCH_APPLICATION_PATTERN = Set.of("cluster:admin/xpack/application/search_application/*"); private static final Set MANAGE_SEARCH_QUERY_RULES_PATTERN = Set.of("cluster:admin/xpack/query_rules/*"); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java index 8e4f9108c3b9c..49be4c5d466b2 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java @@ -407,6 +407,7 @@ static RoleDescriptor kibanaSystem(String name) { getRemoteIndicesReadPrivileges("traces-apm.*"), getRemoteIndicesReadPrivileges("traces-apm-*") }, null, + null, null ); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java index 0793578004a4e..dd8f34a60fa1f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java @@ -102,6 +102,7 @@ public class ReservedRolesStore implements BiConsumer, ActionListene new String[] { "*" } ) ), + null, null ); @@ -201,6 +202,7 @@ private static Map initializeReservedRoles() { getRemoteIndicesReadPrivileges("/metrics-(beats|elasticsearch|enterprisesearch|kibana|logstash).*/"), getRemoteIndicesReadPrivileges("metricbeat-*") }, null, + null, null ) ), diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Validation.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Validation.java index 3c482b82075fc..eaf59e001d098 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Validation.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Validation.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.core.security.support; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.xpack.core.security.authc.esnative.ClientReservedRealm; @@ -204,10 +205,19 @@ public static Error validatePassword(SecureString password) { public static final class Roles { + public static final int MAX_DESCRIPTION_LENGTH = 1000; + public static Error validateRoleName(String roleName, boolean allowReserved) { return validateRoleName(roleName, allowReserved, MAX_NAME_LENGTH); } + public static Error validateRoleDescription(String description) { + if (description != null && description.length() > MAX_DESCRIPTION_LENGTH) { + return new Error(Strings.format("Role description must be less than %s characters.", MAX_DESCRIPTION_LENGTH)); + } + return null; + } + static Error validateRoleName(String roleName, boolean allowReserved, int maxLength) { if (roleName == null) { return new Error("role name is missing"); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/SystemUser.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/SystemUser.java index 1413d7f87eaa1..a1b141d0aa0e8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/SystemUser.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/SystemUser.java @@ -46,6 +46,7 @@ public class SystemUser extends InternalUser { null, null, null, + null, null ); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsAction.java index 0333322d2acc5..6fe4427b1065c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsAction.java @@ -77,7 +77,7 @@ public Request(StreamInput in) throws IOException { expandedIds = 
in.readCollectionAsImmutableList(StreamInput::readString); pageParams = new PageParams(in); allowNoMatch = in.readBoolean(); - if (in.getTransportVersion().onOrAfter(TransportVersions.TRANSFORM_GET_BASIC_STATS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { basic = in.readBoolean(); } else { basic = false; @@ -130,7 +130,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeStringCollection(expandedIds); pageParams.writeTo(out); out.writeBoolean(allowNoMatch); - if (out.getTransportVersion().onOrAfter(TransportVersions.TRANSFORM_GET_BASIC_STATS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeBoolean(basic); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/InferenceActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/InferenceActionRequestTests.java index cef2d710237cf..476167c5db0fb 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/InferenceActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/InferenceActionRequestTests.java @@ -192,7 +192,7 @@ protected InferenceAction.Request mutateInstanceForVersion(InferenceAction.Reque InputType.UNSPECIFIED, InferenceAction.Request.DEFAULT_TIMEOUT ); - } else if (version.before(TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED)) { + } else if (version.before(TransportVersions.V_8_13_0)) { return new InferenceAction.Request( instance.getTaskType(), instance.getInferenceEntityId(), @@ -202,7 +202,7 @@ protected InferenceAction.Request mutateInstanceForVersion(InferenceAction.Reque InputType.UNSPECIFIED, InferenceAction.Request.DEFAULT_TIMEOUT ); - } else if (version.before(TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_UNSPECIFIED_ADDED) + } else if (version.before(TransportVersions.V_8_13_0) && (instance.getInputType() == 
InputType.UNSPECIFIED || instance.getInputType() == InputType.CLASSIFICATION || instance.getInputType() == InputType.CLUSTERING)) { @@ -215,7 +215,7 @@ protected InferenceAction.Request mutateInstanceForVersion(InferenceAction.Reque InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT ); - } else if (version.before(TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_CLASS_CLUSTER_ADDED) + } else if (version.before(TransportVersions.V_8_13_0) && (instance.getInputType() == InputType.CLUSTERING || instance.getInputType() == InputType.CLASSIFICATION)) { return new InferenceAction.Request( instance.getTaskType(), @@ -262,138 +262,10 @@ public void testWriteTo_WhenVersionIsOnAfterUnspecifiedAdded() throws IOExceptio InputType.UNSPECIFIED, InferenceAction.Request.DEFAULT_TIMEOUT ), - TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_UNSPECIFIED_ADDED + TransportVersions.V_8_13_0 ); } - public void testWriteTo_WhenVersionIsBeforeUnspecifiedAdded_ButAfterInputTypeAdded_ShouldSetToIngest() throws IOException { - assertBwcSerialization( - new InferenceAction.Request( - TaskType.TEXT_EMBEDDING, - "model", - null, - List.of(), - Map.of(), - InputType.UNSPECIFIED, - InferenceAction.Request.DEFAULT_TIMEOUT - ), - TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED - ); - } - - public void testWriteTo_WhenVersionIsBeforeUnspecifiedAdded_ButAfterInputTypeAdded_ShouldSetToIngest_ManualCheck() throws IOException { - var instance = new InferenceAction.Request( - TaskType.TEXT_EMBEDDING, - "model", - null, - List.of(), - Map.of(), - InputType.UNSPECIFIED, - InferenceAction.Request.DEFAULT_TIMEOUT - ); - - InferenceAction.Request deserializedInstance = copyWriteable( - instance, - getNamedWriteableRegistry(), - instanceReader(), - TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED - ); - - assertThat(deserializedInstance.getInputType(), is(InputType.INGEST)); - } - - public void 
testWriteTo_WhenVersionIsBeforeUnspecifiedAdded_ButAfterInputTypeAdded_ShouldSetToIngest_WhenClustering_ManualCheck() - throws IOException { - var instance = new InferenceAction.Request( - TaskType.TEXT_EMBEDDING, - "model", - null, - List.of(), - Map.of(), - InputType.CLUSTERING, - InferenceAction.Request.DEFAULT_TIMEOUT - ); - - InferenceAction.Request deserializedInstance = copyWriteable( - instance, - getNamedWriteableRegistry(), - instanceReader(), - TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED - ); - - assertThat(deserializedInstance.getInputType(), is(InputType.INGEST)); - } - - public void testWriteTo_WhenVersionIsBeforeUnspecifiedAdded_ButAfterInputTypeAdded_ShouldSetToIngest_WhenClassification_ManualCheck() - throws IOException { - var instance = new InferenceAction.Request( - TaskType.TEXT_EMBEDDING, - "model", - null, - List.of(), - Map.of(), - InputType.CLASSIFICATION, - InferenceAction.Request.DEFAULT_TIMEOUT - ); - - InferenceAction.Request deserializedInstance = copyWriteable( - instance, - getNamedWriteableRegistry(), - instanceReader(), - TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED - ); - - assertThat(deserializedInstance.getInputType(), is(InputType.INGEST)); - } - - public - void - testWriteTo_WhenVersionIsBeforeClusterClassAdded_ButAfterUnspecifiedAdded_ShouldSetToUnspecified_WhenClassification_ManualCheck() - throws IOException { - var instance = new InferenceAction.Request( - TaskType.TEXT_EMBEDDING, - "model", - null, - List.of(), - Map.of(), - InputType.CLASSIFICATION, - InferenceAction.Request.DEFAULT_TIMEOUT - ); - - InferenceAction.Request deserializedInstance = copyWriteable( - instance, - getNamedWriteableRegistry(), - instanceReader(), - TransportVersions.ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED - ); - - assertThat(deserializedInstance.getInputType(), is(InputType.UNSPECIFIED)); - } - - public - void - 
testWriteTo_WhenVersionIsBeforeClusterClassAdded_ButAfterUnspecifiedAdded_ShouldSetToUnspecified_WhenClustering_ManualCheck() - throws IOException { - var instance = new InferenceAction.Request( - TaskType.TEXT_EMBEDDING, - "model", - null, - List.of(), - Map.of(), - InputType.CLUSTERING, - InferenceAction.Request.DEFAULT_TIMEOUT - ); - - InferenceAction.Request deserializedInstance = copyWriteable( - instance, - getNamedWriteableRegistry(), - instanceReader(), - TransportVersions.ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED - ); - - assertThat(deserializedInstance.getInputType(), is(InputType.UNSPECIFIED)); - } - public void testWriteTo_WhenVersionIsBeforeInputTypeAdded_ShouldSetInputTypeToUnspecified() throws IOException { var instance = new InferenceAction.Request( TaskType.TEXT_EMBEDDING, @@ -409,44 +281,21 @@ public void testWriteTo_WhenVersionIsBeforeInputTypeAdded_ShouldSetInputTypeToUn instance, getNamedWriteableRegistry(), instanceReader(), - TransportVersions.HOT_THREADS_AS_BYTES + TransportVersions.V_8_12_1 ); assertThat(deserializedInstance.getInputType(), is(InputType.UNSPECIFIED)); } public void testGetInputTypeToWrite_ReturnsIngest_WhenInputTypeIsUnspecified_VersionBeforeUnspecifiedIntroduced() { - assertThat( - getInputTypeToWrite(InputType.UNSPECIFIED, TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED), - is(InputType.INGEST) - ); + assertThat(getInputTypeToWrite(InputType.UNSPECIFIED, TransportVersions.V_8_12_1), is(InputType.INGEST)); } public void testGetInputTypeToWrite_ReturnsIngest_WhenInputTypeIsClassification_VersionBeforeUnspecifiedIntroduced() { - assertThat( - getInputTypeToWrite(InputType.CLASSIFICATION, TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED), - is(InputType.INGEST) - ); + assertThat(getInputTypeToWrite(InputType.CLASSIFICATION, TransportVersions.V_8_12_1), is(InputType.INGEST)); } public void testGetInputTypeToWrite_ReturnsIngest_WhenInputTypeIsClustering_VersionBeforeUnspecifiedIntroduced() { - assertThat( - 
getInputTypeToWrite(InputType.CLUSTERING, TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED), - is(InputType.INGEST) - ); - } - - public void testGetInputTypeToWrite_ReturnsUnspecified_WhenInputTypeIsClassification_VersionBeforeClusteringClassIntroduced() { - assertThat( - getInputTypeToWrite(InputType.CLUSTERING, TransportVersions.ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED), - is(InputType.UNSPECIFIED) - ); - } - - public void testGetInputTypeToWrite_ReturnsUnspecified_WhenInputTypeIsClustering_VersionBeforeClusteringClassIntroduced() { - assertThat( - getInputTypeToWrite(InputType.CLASSIFICATION, TransportVersions.ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED), - is(InputType.UNSPECIFIED) - ); + assertThat(getInputTypeToWrite(InputType.CLUSTERING, TransportVersions.V_8_12_1), is(InputType.INGEST)); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceActionRequestTests.java index 9c435bd37b2cb..3ab5851815474 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceActionRequestTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.core.ml.action; import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; @@ -22,23 +23,21 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -import static org.elasticsearch.TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED; -import static org.elasticsearch.TransportVersions.UPDATE_API_KEY_EXPIRATION_TIME_ADDED; import static 
org.hamcrest.Matchers.is; public class CoordinatedInferenceActionRequestTests extends AbstractBWCWireSerializationTestCase { public void testSerializesPrefixType_WhenTransportVersionIs_InputTypeAdded() throws IOException { var instance = createTestInstance(); instance.setPrefixType(TrainedModelPrefixStrings.PrefixType.INGEST); - var copy = copyWriteable(instance, getNamedWriteableRegistry(), instanceReader(), ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED); - assertOnBWCObject(copy, instance, ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED); + var copy = copyWriteable(instance, getNamedWriteableRegistry(), instanceReader(), TransportVersions.V_8_13_0); + assertOnBWCObject(copy, instance, TransportVersions.V_8_13_0); assertThat(copy.getPrefixType(), is(TrainedModelPrefixStrings.PrefixType.INGEST)); } public void testSerializesPrefixType_DoesNotSerialize_WhenTransportVersion_IsPriorToInputTypeAdded() throws IOException { var instance = createTestInstance(); instance.setPrefixType(TrainedModelPrefixStrings.PrefixType.INGEST); - var copy = copyWriteable(instance, getNamedWriteableRegistry(), instanceReader(), UPDATE_API_KEY_EXPIRATION_TIME_ADDED); + var copy = copyWriteable(instance, getNamedWriteableRegistry(), instanceReader(), TransportVersions.V_8_12_1); assertNotSame(copy, instance); assertNotEquals(copy, instance); @@ -117,7 +116,7 @@ protected CoordinatedInferenceAction.Request mutateInstanceForVersion( CoordinatedInferenceAction.Request instance, TransportVersion version ) { - if (version.before(ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED)) { + if (version.before(TransportVersions.V_8_13_0)) { instance.setPrefixType(TrainedModelPrefixStrings.PrefixType.NONE); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/ApiKeyTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/ApiKeyTests.java index 710c4c5adaf67..1bad9bdfbfc77 100644 --- 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/ApiKeyTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/ApiKeyTests.java @@ -30,8 +30,8 @@ import java.util.Set; import java.util.concurrent.TimeUnit; -import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests.randomCrossClusterAccessRoleDescriptor; -import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests.randomUniquelyNamedRoleDescriptors; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomCrossClusterAccessRoleDescriptor; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomUniquelyNamedRoleDescriptors; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasKey; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequestTests.java index 525c805f37929..78cf2020f26cc 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequestTests.java @@ -71,7 +71,8 @@ public void testRoleDescriptorValidation() { null, null, null, - new RoleDescriptor.Restriction(unknownWorkflows) + new RoleDescriptor.Restriction(unknownWorkflows), + null ) ), null, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestTests.java index 17298c04709a4..bb7778b821457 100644 --- 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestTests.java @@ -106,7 +106,8 @@ public void testRoleDescriptorValidation() { null, null, null, - new RoleDescriptor.Restriction(unknownWorkflows) + new RoleDescriptor.Restriction(unknownWorkflows), + null ) ), null diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestTests.java index 161e9419f9561..03706d928caad 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestTests.java @@ -63,7 +63,8 @@ public void testRoleDescriptorValidation() { null, null, null, - new RoleDescriptor.Restriction(workflows.toArray(String[]::new)) + new RoleDescriptor.Restriction(workflows.toArray(String[]::new)), + null ) ), null, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/AuthenticationTestHelper.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/AuthenticationTestHelper.java index b7495004e58e7..483b2426e6ad2 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/AuthenticationTestHelper.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/AuthenticationTestHelper.java @@ -314,6 +314,7 @@ public static CrossClusterAccessSubjectInfo randomCrossClusterAccessSubjectInfo( null, null, null, + null, null ) ) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/CrossClusterAccessSubjectInfoTests.java 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/CrossClusterAccessSubjectInfoTests.java index f22bf886357c4..ec20e6e5fa2ff 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/CrossClusterAccessSubjectInfoTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/CrossClusterAccessSubjectInfoTests.java @@ -31,7 +31,7 @@ import java.util.stream.Collectors; import static org.elasticsearch.xpack.core.security.authc.CrossClusterAccessSubjectInfo.CROSS_CLUSTER_ACCESS_SUBJECT_INFO_HEADER_KEY; -import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests.randomUniquelyNamedRoleDescriptors; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomUniquelyNamedRoleDescriptors; import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasKey; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTestHelper.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTestHelper.java new file mode 100644 index 0000000000000..e6b9097a023cc --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTestHelper.java @@ -0,0 +1,314 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.core.security.authz; + +import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.core.Strings; +import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissionGroup; +import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions; +import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivileges; +import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; +import org.elasticsearch.xpack.core.security.support.MetadataUtils; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.test.ESTestCase.generateRandomStringArray; +import static org.elasticsearch.test.ESTestCase.randomAlphaOfLengthBetween; +import static org.elasticsearch.test.ESTestCase.randomBoolean; +import static org.elasticsearch.test.ESTestCase.randomInt; +import static org.elasticsearch.test.ESTestCase.randomIntBetween; +import static org.elasticsearch.test.ESTestCase.randomList; +import static org.elasticsearch.test.ESTestCase.randomNonEmptySubsetOf; +import static org.elasticsearch.test.ESTestCase.randomSubsetOf; +import static org.elasticsearch.test.ESTestCase.randomValueOtherThanMany; +import static org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.CCR_CLUSTER_PRIVILEGE_NAMES; +import static org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.CCR_INDICES_PRIVILEGE_NAMES; +import static org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.CCS_AND_CCR_CLUSTER_PRIVILEGE_NAMES; +import static 
org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.CCS_CLUSTER_PRIVILEGE_NAMES; +import static org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.CCS_INDICES_PRIVILEGE_NAMES; +import static org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.ROLE_DESCRIPTOR_NAME; + +public final class RoleDescriptorTestHelper { + + public static Builder builder() { + return new Builder(); + } + + public static RoleDescriptor randomRoleDescriptor() { + return builder().allowReservedMetadata(randomBoolean()) + .allowRemoteIndices(randomBoolean()) + .allowRestriction(randomBoolean()) + .allowDescription(randomBoolean()) + .allowRemoteClusters(randomBoolean()) + .build(); + } + + public static Map randomRoleDescriptorMetadata(boolean allowReservedMetadata) { + final Map metadata = new HashMap<>(); + while (randomBoolean()) { + String key = randomAlphaOfLengthBetween(4, 12); + if (allowReservedMetadata && randomBoolean()) { + key = MetadataUtils.RESERVED_PREFIX + key; + } + final Object value = randomBoolean() ? 
randomInt() : randomAlphaOfLengthBetween(3, 50); + metadata.put(key, value); + } + return metadata; + } + + public static ConfigurableClusterPrivilege[] randomClusterPrivileges() { + final ConfigurableClusterPrivilege[] configurableClusterPrivileges = switch (randomIntBetween(0, 4)) { + case 0 -> new ConfigurableClusterPrivilege[0]; + case 1 -> new ConfigurableClusterPrivilege[] { + new ConfigurableClusterPrivileges.ManageApplicationPrivileges( + Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) + ) }; + case 2 -> new ConfigurableClusterPrivilege[] { + new ConfigurableClusterPrivileges.WriteProfileDataPrivileges( + Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) + ) }; + case 3 -> new ConfigurableClusterPrivilege[] { + new ConfigurableClusterPrivileges.WriteProfileDataPrivileges( + Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) + ), + new ConfigurableClusterPrivileges.ManageApplicationPrivileges( + Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) + ) }; + case 4 -> new ConfigurableClusterPrivilege[] { + new ConfigurableClusterPrivileges.ManageApplicationPrivileges( + Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) + ), + new ConfigurableClusterPrivileges.WriteProfileDataPrivileges( + Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) + ) }; + default -> throw new IllegalStateException("Unexpected value"); + }; + return configurableClusterPrivileges; + } + + public static RoleDescriptor.ApplicationResourcePrivileges[] randomApplicationPrivileges() { + final RoleDescriptor.ApplicationResourcePrivileges[] applicationPrivileges = + new RoleDescriptor.ApplicationResourcePrivileges[randomIntBetween(0, 2)]; + for (int i = 0; i < applicationPrivileges.length; i++) { + final RoleDescriptor.ApplicationResourcePrivileges.Builder builder = 
RoleDescriptor.ApplicationResourcePrivileges.builder(); + builder.application("app" + randomAlphaOfLengthBetween(5, 12) + (randomBoolean() ? "*" : "")); + if (randomBoolean()) { + builder.privileges("*"); + } else { + builder.privileges(generateRandomStringArray(6, randomIntBetween(4, 8), false, false)); + } + if (randomBoolean()) { + builder.resources("*"); + } else { + builder.resources(generateRandomStringArray(6, randomIntBetween(4, 8), false, false)); + } + applicationPrivileges[i] = builder.build(); + } + return applicationPrivileges; + } + + public static RoleDescriptor.RemoteIndicesPrivileges[] randomRemoteIndicesPrivileges(int min, int max) { + return randomRemoteIndicesPrivileges(min, max, Set.of()); + } + + public static RoleDescriptor.RemoteIndicesPrivileges[] randomRemoteIndicesPrivileges(int min, int max, Set excludedPrivileges) { + final RoleDescriptor.IndicesPrivileges[] innerIndexPrivileges = randomIndicesPrivileges(min, max, excludedPrivileges); + final RoleDescriptor.RemoteIndicesPrivileges[] remoteIndexPrivileges = + new RoleDescriptor.RemoteIndicesPrivileges[innerIndexPrivileges.length]; + for (int i = 0; i < remoteIndexPrivileges.length; i++) { + remoteIndexPrivileges[i] = new RoleDescriptor.RemoteIndicesPrivileges( + innerIndexPrivileges[i], + generateRandomStringArray(5, randomIntBetween(3, 9), false, false) + ); + } + return remoteIndexPrivileges; + } + + public static RoleDescriptor.IndicesPrivileges[] randomIndicesPrivileges(int min, int max) { + return randomIndicesPrivileges(min, max, Set.of()); + } + + public static RoleDescriptor.IndicesPrivileges[] randomIndicesPrivileges(int min, int max, Set excludedPrivileges) { + final RoleDescriptor.IndicesPrivileges[] indexPrivileges = new RoleDescriptor.IndicesPrivileges[randomIntBetween(min, max)]; + for (int i = 0; i < indexPrivileges.length; i++) { + indexPrivileges[i] = randomIndicesPrivilegesBuilder(excludedPrivileges).build(); + } + return indexPrivileges; + } + + public static 
RoleDescriptor.IndicesPrivileges.Builder randomIndicesPrivilegesBuilder() { + return randomIndicesPrivilegesBuilder(Set.of()); + } + + private static RoleDescriptor.IndicesPrivileges.Builder randomIndicesPrivilegesBuilder(Set excludedPrivileges) { + final Set candidatePrivilegesNames = Sets.difference(IndexPrivilege.names(), excludedPrivileges); + assert false == candidatePrivilegesNames.isEmpty() : "no candidate privilege names to random from"; + final RoleDescriptor.IndicesPrivileges.Builder builder = RoleDescriptor.IndicesPrivileges.builder() + .privileges(randomSubsetOf(randomIntBetween(1, 4), candidatePrivilegesNames)) + .indices(generateRandomStringArray(5, randomIntBetween(3, 9), false, false)) + .allowRestrictedIndices(randomBoolean()); + randomDlsFls(builder); + return builder; + } + + private static void randomDlsFls(RoleDescriptor.IndicesPrivileges.Builder builder) { + if (randomBoolean()) { + builder.query(randomBoolean() ? Strings.format(""" + { "term": { "%s" : "%s" } } + """, randomAlphaOfLengthBetween(3, 24), randomAlphaOfLengthBetween(3, 24)) : """ + { "match_all": {} } + """); + } + if (randomBoolean()) { + if (randomBoolean()) { + builder.grantedFields("*"); + builder.deniedFields(generateRandomStringArray(4, randomIntBetween(4, 9), false, false)); + } else { + builder.grantedFields(generateRandomStringArray(4, randomIntBetween(4, 9), false, false)); + } + } + } + + public static RoleDescriptor randomCrossClusterAccessRoleDescriptor() { + final int searchSize = randomIntBetween(0, 3); + final int replicationSize = randomIntBetween(searchSize == 0 ? 
1 : 0, 3); + assert searchSize + replicationSize > 0; + + final String[] clusterPrivileges; + if (searchSize > 0 && replicationSize > 0) { + clusterPrivileges = CCS_AND_CCR_CLUSTER_PRIVILEGE_NAMES; + } else if (searchSize > 0) { + clusterPrivileges = CCS_CLUSTER_PRIVILEGE_NAMES; + } else { + clusterPrivileges = CCR_CLUSTER_PRIVILEGE_NAMES; + } + + final List indexPrivileges = new ArrayList<>(); + for (int i = 0; i < searchSize; i++) { + final RoleDescriptor.IndicesPrivileges.Builder builder = RoleDescriptor.IndicesPrivileges.builder() + .privileges(CCS_INDICES_PRIVILEGE_NAMES) + .indices(generateRandomStringArray(5, randomIntBetween(3, 9), false, false)) + .allowRestrictedIndices(randomBoolean()); + randomDlsFls(builder); + indexPrivileges.add(builder.build()); + } + for (int i = 0; i < replicationSize; i++) { + final RoleDescriptor.IndicesPrivileges.Builder builder = RoleDescriptor.IndicesPrivileges.builder() + .privileges(CCR_INDICES_PRIVILEGE_NAMES) + .indices(generateRandomStringArray(5, randomIntBetween(3, 9), false, false)) + .allowRestrictedIndices(randomBoolean()); + indexPrivileges.add(builder.build()); + } + + return new RoleDescriptor( + ROLE_DESCRIPTOR_NAME, + clusterPrivileges, + indexPrivileges.toArray(RoleDescriptor.IndicesPrivileges[]::new), + null + ); + } + + public static List randomUniquelyNamedRoleDescriptors(int minSize, int maxSize) { + return randomValueOtherThanMany( + roleDescriptors -> roleDescriptors.stream().map(RoleDescriptor::getName).distinct().count() != roleDescriptors.size(), + () -> randomList(minSize, maxSize, () -> builder().build()) + ); + } + + public static RemoteClusterPermissions randomRemoteClusterPermissions(int maxGroups) { + final RemoteClusterPermissions remoteClusterPermissions = new RemoteClusterPermissions(); + final String[] supportedPermissions = RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]); + for (int i = 0; i < maxGroups; i++) { + remoteClusterPermissions.addGroup( + 
new RemoteClusterPermissionGroup( + randomNonEmptySubsetOf(Arrays.asList(supportedPermissions)).toArray(new String[0]), + generateRandomStringArray(5, randomIntBetween(3, 9), false, false) + ) + ); + } + return remoteClusterPermissions; + } + + public static class Builder { + + private boolean allowReservedMetadata = false; + private boolean allowRemoteIndices = false; + private boolean alwaysIncludeRemoteIndices = false; + private boolean allowRestriction = false; + private boolean allowDescription = false; + private boolean allowRemoteClusters = false; + + public Builder() {} + + public Builder allowReservedMetadata(boolean allowReservedMetadata) { + this.allowReservedMetadata = allowReservedMetadata; + return this; + } + + public Builder alwaysIncludeRemoteIndices() { + this.alwaysIncludeRemoteIndices = true; + return this; + } + + public Builder allowRemoteIndices(boolean allowRemoteIndices) { + this.allowRemoteIndices = allowRemoteIndices; + return this; + } + + public Builder allowRestriction(boolean allowRestriction) { + this.allowRestriction = allowRestriction; + return this; + } + + public Builder allowDescription(boolean allowDescription) { + this.allowDescription = allowDescription; + return this; + } + + public Builder allowRemoteClusters(boolean allowRemoteClusters) { + this.allowRemoteClusters = allowRemoteClusters; + return this; + } + + public RoleDescriptor build() { + final RoleDescriptor.RemoteIndicesPrivileges[] remoteIndexPrivileges; + if (alwaysIncludeRemoteIndices || (allowRemoteIndices && randomBoolean())) { + remoteIndexPrivileges = randomRemoteIndicesPrivileges(0, 3); + } else { + remoteIndexPrivileges = null; + } + + RemoteClusterPermissions remoteClusters = RemoteClusterPermissions.NONE; + if (allowRemoteClusters && randomBoolean()) { + remoteClusters = randomRemoteClusterPermissions(randomIntBetween(1, 5)); + } + + return new RoleDescriptor( + randomAlphaOfLengthBetween(3, 90), + 
randomSubsetOf(ClusterPrivilegeResolver.names()).toArray(String[]::new), + randomIndicesPrivileges(0, 3), + randomApplicationPrivileges(), + randomClusterPrivileges(), + generateRandomStringArray(5, randomIntBetween(2, 8), false, true), + randomRoleDescriptorMetadata(allowReservedMetadata), + Map.of(), + remoteIndexPrivileges, + remoteClusters, + allowRestriction ? RoleRestrictionTests.randomWorkflowsRestriction(1, 3) : null, + allowDescription ? randomAlphaOfLengthBetween(0, 20) : null + ); + } + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java index a3a590dc5a4d4..d7b9f9ddd5b58 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java @@ -31,33 +31,24 @@ import org.elasticsearch.xpack.core.XPackClientPlugin; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.ApplicationResourcePrivileges; import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsCache; -import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissionGroup; import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions; -import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivileges; -import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; -import org.elasticsearch.xpack.core.security.support.MetadataUtils; import org.hamcrest.Matchers; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import 
java.util.HashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; -import java.util.Set; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.CCR_CLUSTER_PRIVILEGE_NAMES; -import static org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.CCR_INDICES_PRIVILEGE_NAMES; -import static org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.CCS_AND_CCR_CLUSTER_PRIVILEGE_NAMES; -import static org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.CCS_CLUSTER_PRIVILEGE_NAMES; -import static org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.CCS_INDICES_PRIVILEGE_NAMES; -import static org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.ROLE_DESCRIPTOR_NAME; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptor.WORKFLOWS_RESTRICTION_VERSION; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomIndicesPrivileges; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomIndicesPrivilegesBuilder; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomRemoteClusterPermissions; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -156,17 +147,18 @@ public void testToString() { + ", field_security=[grant=[body,title], except=null], query={\"match_all\": {}}],]" + ", applicationPrivileges=[ApplicationResourcePrivileges[application=my_app, privileges=[read,write], resources=[*]],]" + ", runAs=[sudo], metadata=[{}], remoteIndicesPrivileges=[], remoteClusterPrivileges=[]" - + ", 
restriction=Restriction[workflows=[]]]" + + ", restriction=Restriction[workflows=[]], description=]" ) ); } public void testToXContentRoundtrip() throws Exception { - final RoleDescriptor descriptor = randomRoleDescriptor(true, true, true, true); + final RoleDescriptor descriptor = RoleDescriptorTestHelper.randomRoleDescriptor(); final XContentType xContentType = randomFrom(XContentType.values()); final BytesReference xContentValue = toShuffledXContent(descriptor, xContentType, ToXContent.EMPTY_PARAMS, false); final RoleDescriptor parsed = RoleDescriptor.parserBuilder() .allowRestriction(true) + .allowDescription(true) .build() .parse(descriptor.getName(), xContentValue, xContentType); assertThat(parsed, equalTo(descriptor)); @@ -268,9 +260,14 @@ public void testParse() throws Exception { ], "restriction":{ "workflows": ["search_application_query"] - } + }, + "description": "Lorem ipsum dolor sit amet, consectetur adipiscing elit." }"""; - rd = RoleDescriptor.parserBuilder().allowRestriction(true).build().parse("test", new BytesArray(q), XContentType.JSON); + rd = RoleDescriptor.parserBuilder() + .allowRestriction(true) + .allowDescription(true) + .build() + .parse("test", new BytesArray(q), XContentType.JSON); assertEquals("test", rd.getName()); assertArrayEquals(new String[] { "a", "b" }, rd.getClusterPrivileges()); assertEquals(3, rd.getIndicesPrivileges().length); @@ -594,16 +591,18 @@ public void testSerializationForCurrentVersion() throws Exception { final boolean canIncludeRemoteIndices = version.onOrAfter(TransportVersions.V_8_8_0); final boolean canIncludeRemoteClusters = version.onOrAfter(TransportVersions.ROLE_REMOTE_CLUSTER_PRIVS); final boolean canIncludeWorkflows = version.onOrAfter(WORKFLOWS_RESTRICTION_VERSION); + final boolean canIncludeDescription = version.onOrAfter(TransportVersions.SECURITY_ROLE_DESCRIPTION); logger.info("Testing serialization with version {}", version); BytesStreamOutput output = new BytesStreamOutput(); 
output.setTransportVersion(version); - final RoleDescriptor descriptor = randomRoleDescriptor( - true, - canIncludeRemoteIndices, - canIncludeWorkflows, - canIncludeRemoteClusters - ); + final RoleDescriptor descriptor = RoleDescriptorTestHelper.builder() + .allowReservedMetadata(true) + .allowRemoteIndices(canIncludeRemoteIndices) + .allowRestriction(canIncludeWorkflows) + .allowDescription(canIncludeDescription) + .allowRemoteClusters(canIncludeRemoteClusters) + .build(); descriptor.writeTo(output); final NamedWriteableRegistry registry = new NamedWriteableRegistry(new XPackClientPlugin().getNamedWriteables()); StreamInput streamInput = new NamedWriteableAwareStreamInput( @@ -626,7 +625,14 @@ public void testSerializationWithRemoteIndicesWithElderVersion() throws IOExcept final BytesStreamOutput output = new BytesStreamOutput(); output.setTransportVersion(version); - final RoleDescriptor descriptor = randomRoleDescriptor(true, true, false, false); + final RoleDescriptor descriptor = RoleDescriptorTestHelper.builder() + .allowReservedMetadata(true) + .allowRemoteIndices(true) + .allowRestriction(false) + .allowDescription(false) + .allowRemoteClusters(false) + .build(); + descriptor.writeTo(output); final NamedWriteableRegistry registry = new NamedWriteableRegistry(new XPackClientPlugin().getNamedWriteables()); StreamInput streamInput = new NamedWriteableAwareStreamInput( @@ -650,7 +656,8 @@ public void testSerializationWithRemoteIndicesWithElderVersion() throws IOExcept descriptor.getTransientMetadata(), null, null, - descriptor.getRestriction() + descriptor.getRestriction(), + descriptor.getDescription() ) ) ); @@ -671,7 +678,13 @@ public void testSerializationWithRemoteClusterWithElderVersion() throws IOExcept final BytesStreamOutput output = new BytesStreamOutput(); output.setTransportVersion(version); - final RoleDescriptor descriptor = randomRoleDescriptor(true, false, false, true); + final RoleDescriptor descriptor = RoleDescriptorTestHelper.builder() + 
.allowReservedMetadata(true) + .allowRemoteIndices(false) + .allowRestriction(false) + .allowDescription(false) + .allowRemoteClusters(true) + .build(); descriptor.writeTo(output); final NamedWriteableRegistry registry = new NamedWriteableRegistry(new XPackClientPlugin().getNamedWriteables()); StreamInput streamInput = new NamedWriteableAwareStreamInput( @@ -693,9 +706,10 @@ public void testSerializationWithRemoteClusterWithElderVersion() throws IOExcept descriptor.getRunAs(), descriptor.getMetadata(), descriptor.getTransientMetadata(), + descriptor.getRemoteIndicesPrivileges(), null, - descriptor.getRemoteClusterPermissions(), - descriptor.getRestriction() + descriptor.getRestriction(), + descriptor.getDescription() ) ) ); @@ -715,7 +729,13 @@ public void testSerializationWithWorkflowsRestrictionAndUnsupportedVersions() th final BytesStreamOutput output = new BytesStreamOutput(); output.setTransportVersion(version); - final RoleDescriptor descriptor = randomRoleDescriptor(true, false, true, false); + final RoleDescriptor descriptor = RoleDescriptorTestHelper.builder() + .allowReservedMetadata(true) + .allowRemoteIndices(false) + .allowRestriction(true) + .allowDescription(false) + .allowRemoteClusters(false) + .build(); descriptor.writeTo(output); final NamedWriteableRegistry registry = new NamedWriteableRegistry(new XPackClientPlugin().getNamedWriteables()); StreamInput streamInput = new NamedWriteableAwareStreamInput( @@ -739,7 +759,8 @@ public void testSerializationWithWorkflowsRestrictionAndUnsupportedVersions() th descriptor.getTransientMetadata(), descriptor.getRemoteIndicesPrivileges(), descriptor.getRemoteClusterPermissions(), - null + null, + descriptor.getDescription() ) ) ); @@ -793,6 +814,96 @@ public void testParseRoleWithRestrictionWhenAllowRestrictionIsTrue() throws IOEx assertThat(role.getRestriction().getWorkflows(), arrayContaining("search_application")); } + public void testSerializationWithDescriptionAndUnsupportedVersions() throws IOException 
{ + final TransportVersion versionBeforeRoleDescription = TransportVersionUtils.getPreviousVersion( + TransportVersions.SECURITY_ROLE_DESCRIPTION + ); + final TransportVersion version = TransportVersionUtils.randomVersionBetween( + random(), + TransportVersions.V_7_17_0, + versionBeforeRoleDescription + ); + final BytesStreamOutput output = new BytesStreamOutput(); + output.setTransportVersion(version); + + final RoleDescriptor descriptor = RoleDescriptorTestHelper.builder().allowDescription(true).build(); + descriptor.writeTo(output); + final NamedWriteableRegistry registry = new NamedWriteableRegistry(new XPackClientPlugin().getNamedWriteables()); + StreamInput streamInput = new NamedWriteableAwareStreamInput( + ByteBufferStreamInput.wrap(BytesReference.toBytes(output.bytes())), + registry + ); + streamInput.setTransportVersion(version); + final RoleDescriptor serialized = new RoleDescriptor(streamInput); + if (descriptor.hasDescription()) { + assertThat( + serialized, + equalTo( + new RoleDescriptor( + descriptor.getName(), + descriptor.getClusterPrivileges(), + descriptor.getIndicesPrivileges(), + descriptor.getApplicationPrivileges(), + descriptor.getConditionalClusterPrivileges(), + descriptor.getRunAs(), + descriptor.getMetadata(), + descriptor.getTransientMetadata(), + descriptor.getRemoteIndicesPrivileges(), + descriptor.getRemoteClusterPermissions(), + descriptor.getRestriction(), + null + ) + ) + ); + } else { + assertThat(descriptor, equalTo(serialized)); + } + } + + public void testParseRoleWithDescriptionFailsWhenAllowDescriptionIsFalse() { + final String json = """ + { + "description": "Lorem ipsum", + "cluster": ["manage_security"] + }"""; + final ElasticsearchParseException e = expectThrows( + ElasticsearchParseException.class, + () -> RoleDescriptor.parserBuilder() + .allowRestriction(randomBoolean()) + .allowDescription(false) + .build() + .parse( + "test_role_with_description", + XContentHelper.createParser(XContentParserConfiguration.EMPTY, new 
BytesArray(json), XContentType.JSON) + ) + ); + assertThat( + e, + TestMatchers.throwableWithMessage( + containsString("failed to parse role [test_role_with_description]. unexpected field [description]") + ) + ); + } + + public void testParseRoleWithDescriptionWhenAllowDescriptionIsTrue() throws IOException { + final String json = """ + { + "description": "Lorem ipsum", + "cluster": ["manage_security"] + }"""; + RoleDescriptor role = RoleDescriptor.parserBuilder() + .allowRestriction(randomBoolean()) + .allowDescription(true) + .build() + .parse( + "test_role_with_description", + XContentHelper.createParser(XContentParserConfiguration.EMPTY, new BytesArray(json), XContentType.JSON) + ); + assertThat(role.getName(), equalTo("test_role_with_description")); + assertThat(role.getDescription(), equalTo("Lorem ipsum")); + assertThat(role.getClusterPrivileges(), arrayContaining("manage_security")); + } + public void testParseEmptyQuery() throws Exception { String json = """ { @@ -1148,6 +1259,7 @@ public void testIsEmpty() { new HashMap<>(), new RoleDescriptor.RemoteIndicesPrivileges[0], RemoteClusterPermissions.NONE, + null, null ).isEmpty() ); @@ -1189,7 +1301,8 @@ public void testIsEmpty() { : new RoleDescriptor.RemoteIndicesPrivileges[] { RoleDescriptor.RemoteIndicesPrivileges.builder("rmt").indices("idx").privileges("foo").build() }, booleans.get(7) ? null : randomRemoteClusterPermissions(5), - booleans.get(8) ? null : RoleRestrictionTests.randomWorkflowsRestriction(1, 2) + booleans.get(8) ? 
null : RoleRestrictionTests.randomWorkflowsRestriction(1, 2), + randomAlphaOfLengthBetween(0, 20) ); if (booleans.stream().anyMatch(e -> e.equals(false))) { @@ -1212,11 +1325,18 @@ public void testHasPrivilegesOtherThanIndex() { null, null, null, + null, null ).hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(), is(false) ); - final RoleDescriptor roleDescriptor = randomRoleDescriptor(); + final RoleDescriptor roleDescriptor = RoleDescriptorTestHelper.builder() + .allowReservedMetadata(true) + .allowRemoteIndices(true) + .allowRestriction(true) + .allowDescription(true) + .allowRemoteClusters(true) + .build(); final boolean expected = roleDescriptor.hasClusterPrivileges() || roleDescriptor.hasConfigurableClusterPrivileges() || roleDescriptor.hasApplicationPrivileges() @@ -1225,234 +1345,8 @@ public void testHasPrivilegesOtherThanIndex() { assertThat(roleDescriptor.hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(), equalTo(expected)); } - public static List randomUniquelyNamedRoleDescriptors(int minSize, int maxSize) { - return randomValueOtherThanMany( - roleDescriptors -> roleDescriptors.stream().map(RoleDescriptor::getName).distinct().count() != roleDescriptors.size(), - () -> randomList(minSize, maxSize, () -> randomRoleDescriptor(false)) - ); - } - - public static RoleDescriptor randomRoleDescriptor() { - return randomRoleDescriptor(true); - } - - public static RoleDescriptor randomRoleDescriptor(boolean allowReservedMetadata) { - return randomRoleDescriptor(allowReservedMetadata, false, false, false); - } - - public static RoleDescriptor randomRoleDescriptor( - boolean allowReservedMetadata, - boolean allowRemoteIndices, - boolean allowWorkflows, - boolean allowRemoteClusters - ) { - final RoleDescriptor.RemoteIndicesPrivileges[] remoteIndexPrivileges; - if (false == allowRemoteIndices || randomBoolean()) { - remoteIndexPrivileges = null; - } else { - remoteIndexPrivileges = randomRemoteIndicesPrivileges(0, 3); - } - - 
RemoteClusterPermissions remoteClusters = RemoteClusterPermissions.NONE; - if (allowRemoteClusters && randomBoolean()) { - randomRemoteClusterPermissions(randomIntBetween(1, 5)); - } - - return new RoleDescriptor( - randomAlphaOfLengthBetween(3, 90), - randomSubsetOf(ClusterPrivilegeResolver.names()).toArray(String[]::new), - randomIndicesPrivileges(0, 3), - randomApplicationPrivileges(), - randomClusterPrivileges(), - generateRandomStringArray(5, randomIntBetween(2, 8), false, true), - randomRoleDescriptorMetadata(allowReservedMetadata), - Map.of(), - remoteIndexPrivileges, - remoteClusters, - allowWorkflows ? RoleRestrictionTests.randomWorkflowsRestriction(1, 3) : null - ); - } - - public static Map randomRoleDescriptorMetadata(boolean allowReservedMetadata) { - final Map metadata = new HashMap<>(); - while (randomBoolean()) { - String key = randomAlphaOfLengthBetween(4, 12); - if (allowReservedMetadata && randomBoolean()) { - key = MetadataUtils.RESERVED_PREFIX + key; - } - final Object value = randomBoolean() ? 
randomInt() : randomAlphaOfLengthBetween(3, 50); - metadata.put(key, value); - } - return metadata; - } - - public static ConfigurableClusterPrivilege[] randomClusterPrivileges() { - final ConfigurableClusterPrivilege[] configurableClusterPrivileges = switch (randomIntBetween(0, 4)) { - case 0 -> new ConfigurableClusterPrivilege[0]; - case 1 -> new ConfigurableClusterPrivilege[] { - new ConfigurableClusterPrivileges.ManageApplicationPrivileges( - Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) - ) }; - case 2 -> new ConfigurableClusterPrivilege[] { - new ConfigurableClusterPrivileges.WriteProfileDataPrivileges( - Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) - ) }; - case 3 -> new ConfigurableClusterPrivilege[] { - new ConfigurableClusterPrivileges.WriteProfileDataPrivileges( - Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) - ), - new ConfigurableClusterPrivileges.ManageApplicationPrivileges( - Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) - ) }; - case 4 -> new ConfigurableClusterPrivilege[] { - new ConfigurableClusterPrivileges.ManageApplicationPrivileges( - Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) - ), - new ConfigurableClusterPrivileges.WriteProfileDataPrivileges( - Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) - ) }; - default -> throw new IllegalStateException("Unexpected value"); - }; - return configurableClusterPrivileges; - } - - public static ApplicationResourcePrivileges[] randomApplicationPrivileges() { - final ApplicationResourcePrivileges[] applicationPrivileges = new ApplicationResourcePrivileges[randomIntBetween(0, 2)]; - for (int i = 0; i < applicationPrivileges.length; i++) { - final ApplicationResourcePrivileges.Builder builder = ApplicationResourcePrivileges.builder(); - builder.application("app" + 
randomAlphaOfLengthBetween(5, 12) + (randomBoolean() ? "*" : "")); - if (randomBoolean()) { - builder.privileges("*"); - } else { - builder.privileges(generateRandomStringArray(6, randomIntBetween(4, 8), false, false)); - } - if (randomBoolean()) { - builder.resources("*"); - } else { - builder.resources(generateRandomStringArray(6, randomIntBetween(4, 8), false, false)); - } - applicationPrivileges[i] = builder.build(); - } - return applicationPrivileges; - } - - public static RemoteClusterPermissions randomRemoteClusterPermissions(int maxGroups) { - final RemoteClusterPermissions remoteClusterPermissions = new RemoteClusterPermissions(); - final String[] supportedPermissions = RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]); - for (int i = 0; i < maxGroups; i++) { - remoteClusterPermissions.addGroup( - new RemoteClusterPermissionGroup( - randomNonEmptySubsetOf(Arrays.asList(supportedPermissions)).toArray(new String[0]), - generateRandomStringArray(5, randomIntBetween(3, 9), false, false) - ) - ); - } - return remoteClusterPermissions; - } - - public static RoleDescriptor.RemoteIndicesPrivileges[] randomRemoteIndicesPrivileges(int min, int max) { - return randomRemoteIndicesPrivileges(min, max, Set.of()); - } - - public static RoleDescriptor.RemoteIndicesPrivileges[] randomRemoteIndicesPrivileges(int min, int max, Set excludedPrivileges) { - final RoleDescriptor.IndicesPrivileges[] innerIndexPrivileges = randomIndicesPrivileges(min, max, excludedPrivileges); - final RoleDescriptor.RemoteIndicesPrivileges[] remoteIndexPrivileges = - new RoleDescriptor.RemoteIndicesPrivileges[innerIndexPrivileges.length]; - for (int i = 0; i < remoteIndexPrivileges.length; i++) { - remoteIndexPrivileges[i] = new RoleDescriptor.RemoteIndicesPrivileges( - innerIndexPrivileges[i], - generateRandomStringArray(5, randomIntBetween(3, 9), false, false) - ); - } - return remoteIndexPrivileges; - } - - public static RoleDescriptor.IndicesPrivileges[] 
randomIndicesPrivileges(int min, int max) { - return randomIndicesPrivileges(min, max, Set.of()); - } - - public static RoleDescriptor.IndicesPrivileges[] randomIndicesPrivileges(int min, int max, Set excludedPrivileges) { - final RoleDescriptor.IndicesPrivileges[] indexPrivileges = new RoleDescriptor.IndicesPrivileges[randomIntBetween(min, max)]; - for (int i = 0; i < indexPrivileges.length; i++) { - indexPrivileges[i] = randomIndicesPrivilegesBuilder(excludedPrivileges).build(); - } - return indexPrivileges; - } - - private static RoleDescriptor.IndicesPrivileges.Builder randomIndicesPrivilegesBuilder() { - return randomIndicesPrivilegesBuilder(Set.of()); - } - - private static RoleDescriptor.IndicesPrivileges.Builder randomIndicesPrivilegesBuilder(Set excludedPrivileges) { - final Set candidatePrivilegesNames = Sets.difference(IndexPrivilege.names(), excludedPrivileges); - assert false == candidatePrivilegesNames.isEmpty() : "no candidate privilege names to random from"; - final RoleDescriptor.IndicesPrivileges.Builder builder = RoleDescriptor.IndicesPrivileges.builder() - .privileges(randomSubsetOf(randomIntBetween(1, 4), candidatePrivilegesNames)) - .indices(generateRandomStringArray(5, randomIntBetween(3, 9), false, false)) - .allowRestrictedIndices(randomBoolean()); - randomDlsFls(builder); - return builder; - } - - private static void randomDlsFls(RoleDescriptor.IndicesPrivileges.Builder builder) { - if (randomBoolean()) { - builder.query( - randomBoolean() - ? 
"{ \"term\": { \"" + randomAlphaOfLengthBetween(3, 24) + "\" : \"" + randomAlphaOfLengthBetween(3, 24) + "\" }" - : "{ \"match_all\": {} }" - ); - } - if (randomBoolean()) { - if (randomBoolean()) { - builder.grantedFields("*"); - builder.deniedFields(generateRandomStringArray(4, randomIntBetween(4, 9), false, false)); - } else { - builder.grantedFields(generateRandomStringArray(4, randomIntBetween(4, 9), false, false)); - } - } - } - private static void resetFieldPermssionsCache() { RoleDescriptor.setFieldPermissionsCache(new FieldPermissionsCache(Settings.EMPTY)); } - public static RoleDescriptor randomCrossClusterAccessRoleDescriptor() { - final int searchSize = randomIntBetween(0, 3); - final int replicationSize = randomIntBetween(searchSize == 0 ? 1 : 0, 3); - assert searchSize + replicationSize > 0; - - final String[] clusterPrivileges; - if (searchSize > 0 && replicationSize > 0) { - clusterPrivileges = CCS_AND_CCR_CLUSTER_PRIVILEGE_NAMES; - } else if (searchSize > 0) { - clusterPrivileges = CCS_CLUSTER_PRIVILEGE_NAMES; - } else { - clusterPrivileges = CCR_CLUSTER_PRIVILEGE_NAMES; - } - - final List indexPrivileges = new ArrayList<>(); - for (int i = 0; i < searchSize; i++) { - final RoleDescriptor.IndicesPrivileges.Builder builder = RoleDescriptor.IndicesPrivileges.builder() - .privileges(CCS_INDICES_PRIVILEGE_NAMES) - .indices(generateRandomStringArray(5, randomIntBetween(3, 9), false, false)) - .allowRestrictedIndices(randomBoolean()); - randomDlsFls(builder); - indexPrivileges.add(builder.build()); - } - for (int i = 0; i < replicationSize; i++) { - final RoleDescriptor.IndicesPrivileges.Builder builder = RoleDescriptor.IndicesPrivileges.builder() - .privileges(CCR_INDICES_PRIVILEGE_NAMES) - .indices(generateRandomStringArray(5, randomIntBetween(3, 9), false, false)) - .allowRestrictedIndices(randomBoolean()); - indexPrivileges.add(builder.build()); - } - - return new RoleDescriptor( - ROLE_DESCRIPTOR_NAME, - clusterPrivileges, - 
indexPrivileges.toArray(RoleDescriptor.IndicesPrivileges[]::new), - null - ); - } - } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorsIntersectionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorsIntersectionTests.java index 6f8691fbb317a..a892e8b864e6e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorsIntersectionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorsIntersectionTests.java @@ -27,7 +27,7 @@ import java.util.List; import java.util.Set; -import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests.randomUniquelyNamedRoleDescriptors; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomUniquelyNamedRoleDescriptors; import static org.hamcrest.Matchers.equalTo; public class RoleDescriptorsIntersectionTests extends ESTestCase { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/SimpleRoleTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/SimpleRoleTests.java index 0c15256d1951e..5401be220fe8b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/SimpleRoleTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/SimpleRoleTests.java @@ -276,7 +276,8 @@ public void testForWorkflowWithRestriction() { null, null, null, - new RoleDescriptor.Restriction(new String[] { WorkflowResolver.SEARCH_APPLICATION_QUERY_WORKFLOW.name() }) + new RoleDescriptor.Restriction(new String[] { WorkflowResolver.SEARCH_APPLICATION_QUERY_WORKFLOW.name() }), + null ), new FieldPermissionsCache(Settings.EMPTY), RESTRICTED_INDICES, @@ -290,7 +291,7 @@ public void testForWorkflowWithRestriction() { public void 
testForWorkflowWithoutRestriction() { final SimpleRole role = Role.buildFromRoleDescriptor( - new RoleDescriptor("r1", null, null, null, null, null, null, null, null, null, null), + new RoleDescriptor("r1", null, null, null, null, null, null, null, null, null, null, null), new FieldPermissionsCache(Settings.EMPTY), RESTRICTED_INDICES, List.of() diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java index d15fb9a1409dd..ad73944f4c64d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java @@ -460,7 +460,12 @@ public void testSlmPrivileges() { } { - verifyClusterActionAllowed(ClusterPrivilegeResolver.READ_SLM, "cluster:admin/slm/get", "cluster:admin/ilm/operation_mode/get"); + verifyClusterActionAllowed( + ClusterPrivilegeResolver.READ_SLM, + "cluster:admin/slm/get", + "cluster:admin/slm/status", + "cluster:admin/ilm/operation_mode/get" + ); verifyClusterActionDenied( ClusterPrivilegeResolver.READ_SLM, "cluster:admin/slm/delete", diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/RoleReferenceTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/RoleReferenceTests.java index 554c82dfa44fb..74c8e6addf243 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/RoleReferenceTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/RoleReferenceTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.security.authc.CrossClusterAccessSubjectInfo; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; 
-import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper; import java.util.Set; import java.util.concurrent.ExecutionException; @@ -82,7 +82,7 @@ public void testCrossClusterAccessRoleReference() { } public void testFixedRoleReference() throws ExecutionException, InterruptedException { - final RoleDescriptor roleDescriptor = RoleDescriptorTests.randomRoleDescriptor(); + final RoleDescriptor roleDescriptor = RoleDescriptorTestHelper.randomRoleDescriptor(); final String source = "source"; final var fixedRoleReference = new RoleReference.FixedRoleReference(roleDescriptor, source); diff --git a/x-pack/plugin/core/template-resources/src/main/resources/ecs@mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/ecs@mappings.json index 3eae6c1fa4f5a..1951431859ffe 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/ecs@mappings.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/ecs@mappings.json @@ -189,8 +189,7 @@ "type": "geo_point" }, "path_match": [ - "location", - "*.location" + "*.geo.location" ] } }, diff --git a/x-pack/plugin/core/template-resources/src/main/resources/fleet-actions-results.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-actions-results.json index 88c51a9aef284..85a744200162c 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/fleet-actions-results.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/fleet-actions-results.json @@ -53,6 +53,9 @@ }, "completed_at": { "type": "date" + }, + "namespaces": { + "type": "keyword" } } } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/fleet-actions.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-actions.json index 2b3ecbac92352..8702a098da826 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/fleet-actions.json +++ 
b/x-pack/plugin/core/template-resources/src/main/resources/fleet-actions.json @@ -56,6 +56,9 @@ "type": "binary" } } + }, + "namespaces": { + "type": "keyword" } } } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/fleet-agents.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-agents.json index 6a89d7874c073..ad66ad8796862 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/fleet-agents.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/fleet-agents.json @@ -316,6 +316,9 @@ }, "tags": { "type": "keyword" + }, + "namespaces": { + "type": "keyword" } } } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/fleet-enrollment-api-keys.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-enrollment-api-keys.json index 6be455e02825a..b2a116c0c592e 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/fleet-enrollment-api-keys.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/fleet-enrollment-api-keys.json @@ -33,6 +33,9 @@ }, "updated_at": { "type": "date" + }, + "namespaces": { + "type": "keyword" } } } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-fromhost-data.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-fromhost-data.json index 698e4359e73c1..20e9ccf8daff3 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-fromhost-data.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-fromhost-data.json @@ -38,6 +38,9 @@ "last": { "type": "boolean", "index": false + }, + "namespaces": { + "type": "keyword" } } } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-fromhost-meta.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-fromhost-meta.json index 268e53a9470a8..9bf0c8b23f5ad 100644 --- 
a/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-fromhost-meta.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-fromhost-meta.json @@ -25,6 +25,9 @@ "@timestamp": { "type": "date" }, + "namespaces": { + "type": "keyword" + }, "upload_start": { "type": "date" }, diff --git a/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-tohost-data.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-tohost-data.json index 7247920e5e293..7c990600749d3 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-tohost-data.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-tohost-data.json @@ -25,6 +25,9 @@ "@timestamp": { "type": "date" }, + "namespaces": { + "type": "keyword" + }, "data": { "type": "binary", "store": true diff --git a/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-tohost-meta.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-tohost-meta.json index bdf7e4d00d869..84a3fe05777a9 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-tohost-meta.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-tohost-meta.json @@ -25,6 +25,9 @@ "@timestamp": { "type": "date" }, + "namespaces": { + "type": "keyword" + }, "agent_id": { "type": "keyword" }, diff --git a/x-pack/plugin/core/template-resources/src/main/resources/fleet-policies.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-policies.json index 44e2e67dd06c3..79b4ed0109f32 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/fleet-policies.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/fleet-policies.json @@ -13,6 +13,9 @@ "coordinator_idx": { "type": "integer" }, + "namespaces": { + "type": "keyword" + }, "data": { "enabled": false, "type": "object" diff --git 
a/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@settings.json b/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@settings.json new file mode 100644 index 0000000000000..933d7681c92e8 --- /dev/null +++ b/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@settings.json @@ -0,0 +1,14 @@ +{ + "template": { + "settings": { + "number_of_shards": 1, + "auto_expand_replicas": "0-1" + } + }, + "_meta": { + "description": "default kibana reporting settings installed by elasticsearch", + "managed": true + }, + "version": ${xpack.stack.template.version}, + "deprecated": ${xpack.stack.template.deprecated} +} diff --git a/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@template.json b/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@template.json index 9c4da646c3399..240ad36199fe3 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@template.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@template.json @@ -5,14 +5,10 @@ "hidden": true }, "allow_auto_create": true, - "composed_of": ["kibana-reporting@custom"], + "composed_of": ["kibana-reporting@settings", "kibana-reporting@custom"], "ignore_missing_component_templates": ["kibana-reporting@custom"], "template": { "lifecycle": {}, - "settings": { - "number_of_shards": 1, - "auto_expand_replicas": "0-1" - }, "mappings": { "properties": { "meta": { diff --git a/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-events.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-events.json index c540a61c28f05..fbba399162ee0 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-events.json +++ 
b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-events.json @@ -10,11 +10,10 @@ "sort": { "field": [ "profiling.project.id", - "@timestamp", "orchestrator.resource.name", + "host.name", "container.name", - "process.thread.name", - "host.id" + "process.thread.name" ] } }, diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index 99240d6b6d49d..333ef30f078e6 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -823,7 +823,7 @@ public void updateConnectorNative(UpdateConnectorNativeAction.Request request, A Connector.IS_NATIVE_FIELD.getPreferredName(), request.isNative(), Connector.STATUS_FIELD.getPreferredName(), - ConnectorStatus.CONFIGURED + ConnectorStatus.CONFIGURED.toString() ) ) @@ -969,7 +969,7 @@ public void updateConnectorServiceType(UpdateConnectorServiceTypeAction.Request Connector.SERVICE_TYPE_FIELD.getPreferredName(), request.getServiceType(), Connector.STATUS_FIELD.getPreferredName(), - newStatus + newStatus.toString() ) ) diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java index f5ab8309e27e7..4316b4bccd9bc 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java @@ -266,7 +266,7 @@ public void 
cancelConnectorSyncJob(String connectorSyncJobId, ActionListener lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new BooleanLookup(asBlock(), positions, targetBlockSize); + } + public static long ramBytesEstimated(boolean[] values) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java index 5f6db129e73d3..9215cd0d9bbda 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java @@ -10,8 +10,10 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BitArray; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -87,6 +89,11 @@ public BooleanVector filter(int... 
positions) { return new BooleanBigArrayVector(filtered, positions.length, blockFactory); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new BooleanLookup(asBlock(), positions, targetBlockSize); + } + @Override public void closeInternal() { // The circuit breaker that tracks the values {@link BitArray} is adjusted outside diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java index 7218f3d2771c8..c8921a7c9f02e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java @@ -10,6 +10,8 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -27,6 +29,9 @@ public sealed interface BooleanVector extends Vector permits ConstantBooleanVect @Override BooleanVector filter(int... positions); + @Override + ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize); + /** * Compares the given object with this vector for equality. Returns {@code true} if and only if the * given object is a BooleanVector, and both vectors are {@link #equals(BooleanVector, BooleanVector) equal}. 
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java index 013718bb42a7d..193e6ea5d8965 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java @@ -52,9 +52,8 @@ public BooleanBlock filter(int... positions) { } @Override - public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { - // TODO optimizations - return new BooleanLookup(this, positions, targetBlockSize); + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return vector.lookup(positions, targetBlockSize); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java index 81f507a4fa55a..61bbfb5ebbd02 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java @@ -11,7 +11,9 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BytesRefArray; +import org.elasticsearch.core.ReleasableIterator; import org.elasticsearch.core.Releasables; import java.io.IOException; @@ -25,7 +27,9 @@ final class BytesRefArrayVector extends AbstractVector implements BytesRefVector static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(BytesRefArrayVector.class) // 
TODO: remove these extra bytes once `asBlock` returns a block with a separate reference to the vector. - + RamUsageEstimator.shallowSizeOfInstance(BytesRefVectorBlock.class); + + RamUsageEstimator.shallowSizeOfInstance(BytesRefVectorBlock.class) + // TODO: remove this if/when we account for memory used by Pages + + Block.PAGE_MEM_OVERHEAD_PER_BLOCK; private final BytesRefArray values; @@ -89,6 +93,11 @@ public BytesRefVector filter(int... positions) { } } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new BytesRefLookup(asBlock(), positions, targetBlockSize); + } + public static long ramBytesEstimated(BytesRefArray values) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java index 49075789ed4a4..6232cbdd2717c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java @@ -21,10 +21,6 @@ final class BytesRefBlockBuilder extends AbstractBlockBuilder implements BytesRe private BytesRefArray values; - BytesRefBlockBuilder(int estimatedSize, BlockFactory blockFactory) { - this(estimatedSize, BigArrays.NON_RECYCLING_INSTANCE, blockFactory); - } - BytesRefBlockBuilder(int estimatedSize, BigArrays bigArrays, BlockFactory blockFactory) { super(blockFactory); values = new BytesRefArray(Math.max(estimatedSize, 2), bigArrays); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java index 4f07ca2d61049..3739dccb0f956 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java @@ -11,6 +11,8 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -34,6 +36,9 @@ public sealed interface BytesRefVector extends Vector permits ConstantBytesRefVe @Override BytesRefVector filter(int... positions); + @Override + ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize); + /** * Compares the given object with this vector for equality. Returns {@code true} if and only if the * given object is a BytesRefVector, and both vectors are {@link #equals(BytesRefVector, BytesRefVector) equal}. diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java index 39bd37ea9bc34..16a8fc0888096 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java @@ -63,9 +63,8 @@ public BytesRefBlock filter(int... 
positions) { } @Override - public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { - // TODO optimizations - return new BytesRefLookup(this, positions, targetBlockSize); + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return vector.lookup(positions, targetBlockSize); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java index 16d70d1a0e800..1f6786f64e0a9 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java @@ -8,6 +8,8 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; /** * Vector implementation that stores a constant boolean value. @@ -39,6 +41,28 @@ public BooleanVector filter(int... 
positions) { return blockFactory().newConstantBooleanVector(value, positions.length); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + if (positions.getPositionCount() == 0) { + return ReleasableIterator.empty(); + } + IntVector positionsVector = positions.asVector(); + if (positionsVector == null) { + return new BooleanLookup(asBlock(), positions, targetBlockSize); + } + int min = positionsVector.min(); + if (min < 0) { + throw new IllegalArgumentException("invalid position [" + min + "]"); + } + if (min > getPositionCount()) { + return ReleasableIterator.single((BooleanBlock) positions.blockFactory().newConstantNullBlock(positions.getPositionCount())); + } + if (positionsVector.max() < getPositionCount()) { + return ReleasableIterator.single(positions.blockFactory().newConstantBooleanBlockWith(value, positions.getPositionCount())); + } + return new BooleanLookup(asBlock(), positions, targetBlockSize); + } + @Override public ElementType elementType() { return ElementType.BOOLEAN; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java index eed780a42f7ba..33967d66374c1 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java @@ -9,6 +9,8 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; /** * Vector implementation that stores a constant BytesRef value. @@ -45,6 +47,28 @@ public BytesRefVector filter(int... 
positions) { return blockFactory().newConstantBytesRefVector(value, positions.length); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + if (positions.getPositionCount() == 0) { + return ReleasableIterator.empty(); + } + IntVector positionsVector = positions.asVector(); + if (positionsVector == null) { + return new BytesRefLookup(asBlock(), positions, targetBlockSize); + } + int min = positionsVector.min(); + if (min < 0) { + throw new IllegalArgumentException("invalid position [" + min + "]"); + } + if (min > getPositionCount()) { + return ReleasableIterator.single((BytesRefBlock) positions.blockFactory().newConstantNullBlock(positions.getPositionCount())); + } + if (positionsVector.max() < getPositionCount()) { + return ReleasableIterator.single(positions.blockFactory().newConstantBytesRefBlockWith(value, positions.getPositionCount())); + } + return new BytesRefLookup(asBlock(), positions, targetBlockSize); + } + @Override public ElementType elementType() { return ElementType.BYTES_REF; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java index a783f0243313e..1ddf31d753d43 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java @@ -8,6 +8,8 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; /** * Vector implementation that stores a constant double value. @@ -39,6 +41,28 @@ public DoubleVector filter(int... 
positions) { return blockFactory().newConstantDoubleVector(value, positions.length); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + if (positions.getPositionCount() == 0) { + return ReleasableIterator.empty(); + } + IntVector positionsVector = positions.asVector(); + if (positionsVector == null) { + return new DoubleLookup(asBlock(), positions, targetBlockSize); + } + int min = positionsVector.min(); + if (min < 0) { + throw new IllegalArgumentException("invalid position [" + min + "]"); + } + if (min > getPositionCount()) { + return ReleasableIterator.single((DoubleBlock) positions.blockFactory().newConstantNullBlock(positions.getPositionCount())); + } + if (positionsVector.max() < getPositionCount()) { + return ReleasableIterator.single(positions.blockFactory().newConstantDoubleBlockWith(value, positions.getPositionCount())); + } + return new DoubleLookup(asBlock(), positions, targetBlockSize); + } + @Override public ElementType elementType() { return ElementType.DOUBLE; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java index c6d463af7cfad..e8fb8cb39ceb4 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java @@ -8,6 +8,8 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; /** * Vector implementation that stores a constant int value. @@ -39,6 +41,28 @@ public IntVector filter(int... 
positions) { return blockFactory().newConstantIntVector(value, positions.length); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + if (positions.getPositionCount() == 0) { + return ReleasableIterator.empty(); + } + IntVector positionsVector = positions.asVector(); + if (positionsVector == null) { + return new IntLookup(asBlock(), positions, targetBlockSize); + } + int min = positionsVector.min(); + if (min < 0) { + throw new IllegalArgumentException("invalid position [" + min + "]"); + } + if (min > getPositionCount()) { + return ReleasableIterator.single((IntBlock) positions.blockFactory().newConstantNullBlock(positions.getPositionCount())); + } + if (positionsVector.max() < getPositionCount()) { + return ReleasableIterator.single(positions.blockFactory().newConstantIntBlockWith(value, positions.getPositionCount())); + } + return new IntLookup(asBlock(), positions, targetBlockSize); + } + /** * The minimum value in the block. */ diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java index 0173f1c1d4d7a..b997cbbe22849 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java @@ -8,6 +8,8 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; /** * Vector implementation that stores a constant long value. @@ -39,6 +41,28 @@ public LongVector filter(int... 
positions) { return blockFactory().newConstantLongVector(value, positions.length); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + if (positions.getPositionCount() == 0) { + return ReleasableIterator.empty(); + } + IntVector positionsVector = positions.asVector(); + if (positionsVector == null) { + return new LongLookup(asBlock(), positions, targetBlockSize); + } + int min = positionsVector.min(); + if (min < 0) { + throw new IllegalArgumentException("invalid position [" + min + "]"); + } + if (min > getPositionCount()) { + return ReleasableIterator.single((LongBlock) positions.blockFactory().newConstantNullBlock(positions.getPositionCount())); + } + if (positionsVector.max() < getPositionCount()) { + return ReleasableIterator.single(positions.blockFactory().newConstantLongBlockWith(value, positions.getPositionCount())); + } + return new LongLookup(asBlock(), positions, targetBlockSize); + } + @Override public ElementType elementType() { return ElementType.LONG; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java index 451b6cc7b655b..e7c1d342133d5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java @@ -10,6 +10,8 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; import java.util.stream.Collectors; @@ -23,7 +25,9 @@ final class DoubleArrayVector extends AbstractVector implements DoubleVector { static final long BASE_RAM_BYTES_USED 
= RamUsageEstimator.shallowSizeOfInstance(DoubleArrayVector.class) // TODO: remove these extra bytes once `asBlock` returns a block with a separate reference to the vector. - + RamUsageEstimator.shallowSizeOfInstance(DoubleVectorBlock.class); + + RamUsageEstimator.shallowSizeOfInstance(DoubleVectorBlock.class) + // TODO: remove this if/when we account for memory used by Pages + + Block.PAGE_MEM_OVERHEAD_PER_BLOCK; private final double[] values; @@ -88,6 +92,11 @@ public DoubleVector filter(int... positions) { } } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new DoubleLookup(asBlock(), positions, targetBlockSize); + } + public static long ramBytesEstimated(double[] values) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java index 8f6aedf31b50e..d558eabd2dd4c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java @@ -10,8 +10,10 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.DoubleArray; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -86,6 +88,11 @@ public DoubleVector filter(int... 
positions) { return new DoubleBigArrayVector(filtered, positions.length, blockFactory); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new DoubleLookup(asBlock(), positions, targetBlockSize); + } + @Override public void closeInternal() { // The circuit breaker that tracks the values {@link DoubleArray} is adjusted outside diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java index 1d71575b33316..3d93043f93d8f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java @@ -10,6 +10,8 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -27,6 +29,9 @@ public sealed interface DoubleVector extends Vector permits ConstantDoubleVector @Override DoubleVector filter(int... positions); + @Override + ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize); + /** * Compares the given object with this vector for equality. Returns {@code true} if and only if the * given object is a DoubleVector, and both vectors are {@link #equals(DoubleVector, DoubleVector) equal}. 
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java index e76a4e0c5fdee..24887bebcd838 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java @@ -52,9 +52,8 @@ public DoubleBlock filter(int... positions) { } @Override - public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { - // TODO optimizations - return new DoubleLookup(this, positions, targetBlockSize); + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return vector.lookup(positions, targetBlockSize); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java index a2b6697a38634..e9d9a6b3fb958 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java @@ -10,6 +10,8 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; import java.util.stream.Collectors; @@ -23,7 +25,9 @@ final class IntArrayVector extends AbstractVector implements IntVector { static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(IntArrayVector.class) // TODO: remove these extra bytes once `asBlock` returns a block with a separate reference to the 
vector. - + RamUsageEstimator.shallowSizeOfInstance(IntVectorBlock.class); + + RamUsageEstimator.shallowSizeOfInstance(IntVectorBlock.class) + // TODO: remove this if/when we account for memory used by Pages + + Block.PAGE_MEM_OVERHEAD_PER_BLOCK; private final int[] values; @@ -98,6 +102,11 @@ public IntVector filter(int... positions) { } } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new IntLookup(asBlock(), positions, targetBlockSize); + } + public static long ramBytesEstimated(int[] values) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java index fe89782bad0ec..df8298b87237e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java @@ -10,8 +10,10 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.IntArray; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -126,6 +128,11 @@ public IntVector filter(int... 
positions) { return new IntBigArrayVector(filtered, positions.length, blockFactory); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new IntLookup(asBlock(), positions, targetBlockSize); + } + @Override public void closeInternal() { // The circuit breaker that tracks the values {@link IntArray} is adjusted outside diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java index 8f6f42b66fbe6..b1a2d1b80a410 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java @@ -10,6 +10,8 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -27,6 +29,9 @@ public sealed interface IntVector extends Vector permits ConstantIntVector, IntA @Override IntVector filter(int... positions); + @Override + ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize); + /** * The minimum value in the Vector. An empty Vector will return {@link Integer#MAX_VALUE}. */ diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java index 70bcf6919bea6..ae28fb9f6ffa6 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java @@ -52,9 +52,8 @@ public IntBlock filter(int... 
positions) { } @Override - public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { - // TODO optimizations - return new IntLookup(this, positions, targetBlockSize); + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return vector.lookup(positions, targetBlockSize); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java index 6eec82528c8da..5fa904dcf1acc 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java @@ -10,6 +10,8 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; import java.util.stream.Collectors; @@ -23,7 +25,9 @@ final class LongArrayVector extends AbstractVector implements LongVector { static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LongArrayVector.class) // TODO: remove these extra bytes once `asBlock` returns a block with a separate reference to the vector. - + RamUsageEstimator.shallowSizeOfInstance(LongVectorBlock.class); + + RamUsageEstimator.shallowSizeOfInstance(LongVectorBlock.class) + // TODO: remove this if/when we account for memory used by Pages + + Block.PAGE_MEM_OVERHEAD_PER_BLOCK; private final long[] values; @@ -88,6 +92,11 @@ public LongVector filter(int... 
positions) { } } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new LongLookup(asBlock(), positions, targetBlockSize); + } + public static long ramBytesEstimated(long[] values) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java index d30dedd4cce16..a7828788169ca 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java @@ -10,8 +10,10 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -86,6 +88,11 @@ public LongVector filter(int... 
positions) { return new LongBigArrayVector(filtered, positions.length, blockFactory); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new LongLookup(asBlock(), positions, targetBlockSize); + } + @Override public void closeInternal() { // The circuit breaker that tracks the values {@link LongArray} is adjusted outside diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java index 2ebdb89a31262..e2f53d1ee07f4 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java @@ -10,6 +10,8 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -27,6 +29,9 @@ public sealed interface LongVector extends Vector permits ConstantLongVector, Lo @Override LongVector filter(int... positions); + @Override + ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize); + /** * Compares the given object with this vector for equality. Returns {@code true} if and only if the * given object is a LongVector, and both vectors are {@link #equals(LongVector, LongVector) equal}. 
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java index b6f1e8e77505d..01921e1195f4a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java @@ -52,9 +52,8 @@ public LongBlock filter(int... positions) { } @Override - public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { - // TODO optimizations - return new LongLookup(this, positions, targetBlockSize); + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return vector.lookup(positions, targetBlockSize); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index ed7ee93c99325..9a6b701a2e4ea 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.Accountable; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.unit.ByteSizeValue; @@ -44,6 +45,17 @@ public interface Block extends Accountable, BlockLoader.Block, NamedWriteable, R */ long MAX_LOOKUP = 100_000; + /** + * We do not track memory for pages directly (only for single blocks), + * but the page memory overhead can still be significant, especially for pages containing thousands of blocks. + * For now, we approximate this overhead, per block, using this value. 
+ * + * The exact overhead per block would be (more correctly) {@link RamUsageEstimator#NUM_BYTES_OBJECT_REF}, + * but we approximate it with {@link RamUsageEstimator#NUM_BYTES_OBJECT_ALIGNMENT} to avoid further alignments + * to object size (at the end of the alignment, it would make no practical difference). + */ + int PAGE_MEM_OVERHEAD_PER_BLOCK = RamUsageEstimator.NUM_BYTES_OBJECT_ALIGNMENT; + /** * {@return an efficient dense single-value view of this block}. * Null, if the block is not dense single-valued. That is, if @@ -127,19 +139,19 @@ public interface Block extends Accountable, BlockLoader.Block, NamedWriteable, R * same number of {@link #getPositionCount() positions} as the {@code positions} * parameter. *

- * For example, this this block contained {@code [a, b, [b, c]]} + * For example, if this block contained {@code [a, b, [b, c]]} * and were called with the block {@code [0, 1, 1, [1, 2]]} then the * result would be {@code [a, b, b, [b, b, c]]}. *

*

* This process produces {@code count(this) * count(positions)} values per - * positions which could be quite quite large. Instead of returning a single + * positions which could be quite large. Instead of returning a single * Block, this returns an Iterator of Blocks containing all of the promised * values. *

*

- * The returned {@link ReleasableIterator} may retain a reference to {@link Block}s - * inside the {@link Page}. Close it to release those references. + * The returned {@link ReleasableIterator} may retain a reference to the + * {@code positions} parameter. Close it to release those references. *

*

* This block is built using the same {@link BlockFactory} as was used to diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullVector.java index 4deededdf41c5..a8a6dbaf382f9 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullVector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullVector.java @@ -9,6 +9,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -45,6 +47,12 @@ public ConstantNullVector filter(int... positions) { throw new UnsupportedOperationException("null vector"); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + assert false : "null vector"; + throw new UnsupportedOperationException("null vector"); + } + @Override public boolean getBoolean(int position) { assert false : "null vector"; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java index e5a0d934aa01a..da9ca2bbae270 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java @@ -52,7 +52,7 @@ public Block filter(int... 
positions) { @Override public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { - throw new UnsupportedOperationException(); + throw new UnsupportedOperationException("can't lookup values from DocBlock"); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java index 067fddd311cc7..33f5797f60df8 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java @@ -9,6 +9,8 @@ import org.apache.lucene.util.IntroSorter; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import org.elasticsearch.core.Releasables; import java.util.Objects; @@ -235,6 +237,11 @@ public DocVector filter(int... positions) { } } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + throw new UnsupportedOperationException("can't lookup values from DocVector"); + } + @Override public ElementType elementType() { return ElementType.DOC; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefVector.java index a67db54b68ec9..ec0c7efa715ad 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefVector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefVector.java @@ -10,6 +10,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import 
org.elasticsearch.core.ReleasableIterator; import org.elasticsearch.core.Releasables; import java.io.IOException; @@ -120,6 +122,11 @@ public BytesRefVector filter(int... positions) { } } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new BytesRefLookup(asBlock(), positions, targetBlockSize); + } + @Override public ElementType elementType() { return bytes.elementType(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java index 89b39569be454..9a5688685374d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java @@ -8,8 +8,10 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.Accountable; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.ReleasableIterator; /** * A dense Vector of single values. @@ -35,6 +37,33 @@ public interface Vector extends Accountable, RefCounted, Releasable { */ Vector filter(int... positions); + /** + * Builds an Iterator of new {@link Block}s with the same {@link #elementType} + * as this {@link Vector} whose values are copied from positions in this Vector. + * It has the same number of {@link #getPositionCount() positions} as the + * {@code positions} parameter. + *

+ * For example, if this vector contained {@code [a, b, c]} + * and were called with the block {@code [0, 1, 1, [1, 2]]} then the + * result would be {@code [a, b, b, [b, c]]}. + *

+ *

+ * This process produces {@code count(positions)} values per + * positions which could be quite large. Instead of returning a single + * Block, this returns an Iterator of Blocks containing all of the promised + * values. + *

+ *

+ * The returned {@link ReleasableIterator} may retain a reference to the + * {@code positions} parameter. Close it to release those references. + *

+ *

+ * This block is built using the same {@link BlockFactory} as was used to + * build the {@code positions} parameter. + *

+ */ + ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize); + /** * {@return the element type of this vector} */ diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st index 7eeb7765e3b1e..d594d32898d36 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st @@ -12,7 +12,9 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BytesRefArray; +import org.elasticsearch.core.ReleasableIterator; import org.elasticsearch.core.Releasables; import java.io.IOException; @@ -21,6 +23,8 @@ $else$ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; import java.util.stream.Collectors; @@ -38,7 +42,9 @@ final class $Type$ArrayVector extends AbstractVector implements $Type$Vector { static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance($Type$ArrayVector.class) // TODO: remove these extra bytes once `asBlock` returns a block with a separate reference to the vector. 
- + RamUsageEstimator.shallowSizeOfInstance($Type$VectorBlock.class); + + RamUsageEstimator.shallowSizeOfInstance($Type$VectorBlock.class) + // TODO: remove this if/when we account for memory used by Pages + + Block.PAGE_MEM_OVERHEAD_PER_BLOCK; $if(BytesRef)$ private final BytesRefArray values; @@ -166,6 +172,11 @@ $endif$ } } + @Override + public ReleasableIterator<$Type$Block> lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new $Type$Lookup(asBlock(), positions, targetBlockSize); + } + public static long ramBytesEstimated($if(BytesRef)$BytesRefArray$else$$type$[]$endif$ values) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st index d6a8723748c1f..30ef9e799cf11 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st @@ -10,8 +10,10 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.$Array$; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -148,6 +150,11 @@ $endif$ return new $Type$BigArrayVector(filtered, positions.length, blockFactory); } + @Override + public ReleasableIterator<$Type$Block> lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new $Type$Lookup(asBlock(), positions, targetBlockSize); + } + @Override public void closeInternal() { // The circuit breaker that tracks the values {@link $if(boolean)$Bit$else$$Type$$endif$Array} is 
adjusted outside diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st index 0d3d2293a1bb1..8397a0f5274f1 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st @@ -31,10 +31,6 @@ final class $Type$BlockBuilder extends AbstractBlockBuilder implements $Type$Blo $if(BytesRef)$ private BytesRefArray values; - BytesRefBlockBuilder(int estimatedSize, BlockFactory blockFactory) { - this(estimatedSize, BigArrays.NON_RECYCLING_INSTANCE, blockFactory); - } - BytesRefBlockBuilder(int estimatedSize, BigArrays bigArrays, BlockFactory blockFactory) { super(blockFactory); values = new BytesRefArray(Math.max(estimatedSize, 2), bigArrays); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st index 37cb2d2412522..42c34128121a8 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st @@ -11,6 +11,8 @@ $if(BytesRef)$ import org.apache.lucene.util.BytesRef; $endif$ import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; /** * Vector implementation that stores a constant $type$ value. 
@@ -58,6 +60,28 @@ $endif$ return blockFactory().newConstant$Type$Vector(value, positions.length); } + @Override + public ReleasableIterator<$Type$Block> lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + if (positions.getPositionCount() == 0) { + return ReleasableIterator.empty(); + } + IntVector positionsVector = positions.asVector(); + if (positionsVector == null) { + return new $Type$Lookup(asBlock(), positions, targetBlockSize); + } + int min = positionsVector.min(); + if (min < 0) { + throw new IllegalArgumentException("invalid position [" + min + "]"); + } + if (min > getPositionCount()) { + return ReleasableIterator.single(($Type$Block) positions.blockFactory().newConstantNullBlock(positions.getPositionCount())); + } + if (positionsVector.max() < getPositionCount()) { + return ReleasableIterator.single(positions.blockFactory().newConstant$Type$BlockWith(value, positions.getPositionCount())); + } + return new $Type$Lookup(asBlock(), positions, targetBlockSize); + } + $if(int)$ /** * The minimum value in the block. diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st index 746ccc97a2819..628ee93ed757d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st @@ -13,6 +13,8 @@ $endif$ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -54,6 +56,9 @@ $endif$ @Override $Type$Vector filter(int... 
positions); + @Override + ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize); + $if(int)$ /** * The minimum value in the Vector. An empty Vector will return {@link Integer#MAX_VALUE}. diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st index f011d6f2a4b48..8f4390e8782c5 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st @@ -72,9 +72,8 @@ $endif$ } @Override - public ReleasableIterator<$Type$Block> lookup(IntBlock positions, ByteSizeValue targetBlockSize) { - // TODO optimizations - return new $Type$Lookup(this, positions, targetBlockSize); + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return vector.lookup(positions, targetBlockSize); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java index d05593015211b..c7f12d1099cc1 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java @@ -11,7 +11,6 @@ import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorable; -import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Weight; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; @@ -22,7 +21,6 @@ import org.elasticsearch.core.Releasables; import java.io.IOException; -import java.io.UncheckedIOException; import java.util.List; import 
java.util.function.Function; @@ -41,11 +39,7 @@ public class LuceneCountOperator extends LuceneOperator { private final LeafCollector leafCollector; - public static class Factory implements LuceneOperator.Factory { - private final DataPartitioning dataPartitioning; - private final int taskConcurrency; - private final int limit; - private final LuceneSliceQueue sliceQueue; + public static class Factory extends LuceneOperator.Factory { public Factory( List contexts, @@ -54,11 +48,7 @@ public Factory( int taskConcurrency, int limit ) { - this.limit = limit; - this.dataPartitioning = dataPartitioning; - var weightFunction = weightFunction(queryFunction, ScoreMode.COMPLETE_NO_SCORES); - this.sliceQueue = LuceneSliceQueue.create(contexts, weightFunction, dataPartitioning, taskConcurrency); - this.taskConcurrency = Math.min(sliceQueue.totalSlices(), taskConcurrency); + super(contexts, queryFunction, dataPartitioning, taskConcurrency, limit); } @Override @@ -66,15 +56,6 @@ public SourceOperator get(DriverContext driverContext) { return new LuceneCountOperator(driverContext.blockFactory(), sliceQueue, limit); } - @Override - public int taskConcurrency() { - return taskConcurrency; - } - - public int limit() { - return limit; - } - @Override public String describe() { return "LuceneCountOperator[dataPartitioning = " + dataPartitioning + ", limit = " + limit + "]"; @@ -118,7 +99,7 @@ public void finish() { } @Override - public Page getOutput() { + protected Page getCheckedOutput() throws IOException { if (isFinished()) { assert remainingDocs <= 0 : remainingDocs; return null; @@ -170,8 +151,6 @@ public Page getOutput() { } } return page; - } catch (IOException e) { - throw new UncheckedIOException(e); } finally { processingNanos += System.nanoTime() - start; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java index 
9a7abb2aafc58..10c78be15bd86 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.core.TimeValue; @@ -34,6 +35,7 @@ import java.io.UncheckedIOException; import java.util.Collections; import java.util.HashSet; +import java.util.List; import java.util.Objects; import java.util.Set; import java.util.TreeSet; @@ -72,10 +74,46 @@ protected LuceneOperator(BlockFactory blockFactory, int maxPageSize, LuceneSlice this.sliceQueue = sliceQueue; } - public interface Factory extends SourceOperator.SourceOperatorFactory { - int taskConcurrency(); + public abstract static class Factory implements SourceOperator.SourceOperatorFactory { + protected final DataPartitioning dataPartitioning; + protected final int taskConcurrency; + protected final int limit; + protected final LuceneSliceQueue sliceQueue; + + protected Factory( + List contexts, + Function queryFunction, + DataPartitioning dataPartitioning, + int taskConcurrency, + int limit + ) { + this.limit = limit; + this.dataPartitioning = dataPartitioning; + var weightFunction = weightFunction(queryFunction, ScoreMode.COMPLETE_NO_SCORES); + this.sliceQueue = LuceneSliceQueue.create(contexts, weightFunction, dataPartitioning, taskConcurrency); + this.taskConcurrency = Math.min(sliceQueue.totalSlices(), taskConcurrency); + } + + public final int taskConcurrency() { + return taskConcurrency; + } + + public final int limit() { + return limit; + } } + @Override + public final Page getOutput() { + try { + return getCheckedOutput(); 
+ } catch (IOException ioe) { + throw new UncheckedIOException(ioe); + } + } + + protected abstract Page getCheckedOutput() throws IOException; + @Override public void close() {} @@ -257,7 +295,7 @@ private Status(LuceneOperator operator) { Status(StreamInput in) throws IOException { processedSlices = in.readVInt(); - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_STATUS_INCLUDE_LUCENE_QUERIES)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { processedQueries = in.readCollectionAsSet(StreamInput::readString); processedShards = in.readCollectionAsSet(StreamInput::readString); } else { @@ -276,7 +314,7 @@ private Status(LuceneOperator operator) { @Override public void writeTo(StreamOutput out) throws IOException { out.writeVInt(processedSlices); - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_STATUS_INCLUDE_LUCENE_QUERIES)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeCollection(processedQueries, StreamOutput::writeString); out.writeCollection(processedShards, StreamOutput::writeString); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java index 9b942114e61f2..64836b00a7e1b 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java @@ -10,7 +10,6 @@ import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorable; -import org.apache.lucene.search.ScoreMode; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DocVector; import org.elasticsearch.compute.data.IntBlock; @@ -21,7 +20,6 @@ import org.elasticsearch.core.Releasables; import java.io.IOException; 
-import java.io.UncheckedIOException; import java.util.List; import java.util.function.Function; @@ -37,12 +35,9 @@ public class LuceneSourceOperator extends LuceneOperator { private final LeafCollector leafCollector; private final int minPageSize; - public static class Factory implements LuceneOperator.Factory { - private final DataPartitioning dataPartitioning; - private final int taskConcurrency; + public static class Factory extends LuceneOperator.Factory { + private final int maxPageSize; - private final int limit; - private final LuceneSliceQueue sliceQueue; public Factory( List contexts, @@ -52,12 +47,8 @@ public Factory( int maxPageSize, int limit ) { + super(contexts, queryFunction, dataPartitioning, taskConcurrency, limit); this.maxPageSize = maxPageSize; - this.limit = limit; - this.dataPartitioning = dataPartitioning; - var weightFunction = weightFunction(queryFunction, ScoreMode.COMPLETE_NO_SCORES); - this.sliceQueue = LuceneSliceQueue.create(contexts, weightFunction, dataPartitioning, taskConcurrency); - this.taskConcurrency = Math.min(sliceQueue.totalSlices(), taskConcurrency); } @Override @@ -65,19 +56,10 @@ public SourceOperator get(DriverContext driverContext) { return new LuceneSourceOperator(driverContext.blockFactory(), maxPageSize, sliceQueue, limit); } - @Override - public int taskConcurrency() { - return taskConcurrency; - } - public int maxPageSize() { return maxPageSize; } - public int limit() { - return limit; - } - @Override public String describe() { return "LuceneSourceOperator[dataPartitioning = " @@ -123,7 +105,7 @@ public void finish() { } @Override - public Page getOutput() { + public Page getCheckedOutput() throws IOException { if (isFinished()) { assert currentPagePos == 0 : currentPagePos; return null; @@ -162,8 +144,6 @@ public Page getOutput() { currentPagePos = 0; } return page; - } catch (IOException e) { - throw new UncheckedIOException(e); } finally { processingNanos += System.nanoTime() - start; } diff --git 
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java index 2c22d850daf0c..e9fb15d265fbe 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java @@ -13,7 +13,6 @@ import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreDoc; -import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.TopFieldCollector; import org.elasticsearch.common.Strings; import org.elasticsearch.compute.data.BlockFactory; @@ -28,7 +27,6 @@ import org.elasticsearch.search.sort.SortBuilder; import java.io.IOException; -import java.io.UncheckedIOException; import java.util.Arrays; import java.util.List; import java.util.Optional; @@ -39,13 +37,10 @@ * Source operator that builds Pages out of the output of a TopFieldCollector (aka TopN) */ public final class LuceneTopNSourceOperator extends LuceneOperator { - public static final class Factory implements LuceneOperator.Factory { - private final int taskConcurrency; + public static final class Factory extends LuceneOperator.Factory { + ; private final int maxPageSize; private final List> sorts; - private final int limit; - private final DataPartitioning dataPartitioning; - private final LuceneSliceQueue sliceQueue; public Factory( List contexts, @@ -56,13 +51,9 @@ public Factory( int limit, List> sorts ) { + super(contexts, queryFunction, dataPartitioning, taskConcurrency, limit); this.maxPageSize = maxPageSize; this.sorts = sorts; - this.limit = limit; - this.dataPartitioning = dataPartitioning; - var weightFunction = weightFunction(queryFunction, ScoreMode.TOP_DOCS); - this.sliceQueue = LuceneSliceQueue.create(contexts, weightFunction, 
dataPartitioning, taskConcurrency); - this.taskConcurrency = Math.min(sliceQueue.totalSlices(), taskConcurrency); } @Override @@ -70,19 +61,10 @@ public SourceOperator get(DriverContext driverContext) { return new LuceneTopNSourceOperator(driverContext.blockFactory(), maxPageSize, sorts, limit, sliceQueue); } - @Override - public int taskConcurrency() { - return taskConcurrency; - } - public int maxPageSize() { return maxPageSize; } - public int limit() { - return limit; - } - @Override public String describe() { String notPrettySorts = sorts.stream().map(Strings::toString).collect(Collectors.joining(",")); @@ -136,7 +118,7 @@ public void finish() { } @Override - public Page getOutput() { + public Page getCheckedOutput() throws IOException { if (isFinished()) { return null; } @@ -152,7 +134,7 @@ public Page getOutput() { } } - private Page collect() { + private Page collect() throws IOException { assert doneCollecting == false; var scorer = getCurrentOrLoadNextScorer(); if (scorer == null) { @@ -169,8 +151,6 @@ private Page collect() { } catch (CollectionTerminatedException cte) { // Lucene terminated early the collection (doing topN for an index that's sorted and the topN uses the same sorting) scorer.markAsDone(); - } catch (IOException e) { - throw new UncheckedIOException(e); } if (scorer.isDone()) { var nextScorer = getCurrentOrLoadNextScorer(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java index 58f2c8de67b61..899060dae5fbb 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java @@ -13,7 +13,6 @@ import org.apache.lucene.index.SortedNumericDocValues; import 
org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Query; -import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Weight; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.PriorityQueue; @@ -48,13 +47,23 @@ * This operator currently only supports shard level concurrency. A new concurrency mechanism should be introduced at the time serie level * in order to read tsdb indices in parallel. */ -public record TimeSeriesSortedSourceOperatorFactory( - int limit, - int maxPageSize, - int taskConcurrency, - TimeValue timeSeriesPeriod, - LuceneSliceQueue sliceQueue -) implements LuceneOperator.Factory { +public class TimeSeriesSortedSourceOperatorFactory extends LuceneOperator.Factory { + + private final int maxPageSize; + private final TimeValue timeSeriesPeriod; + + private TimeSeriesSortedSourceOperatorFactory( + List contexts, + Function queryFunction, + int taskConcurrency, + int maxPageSize, + TimeValue timeSeriesPeriod, + int limit + ) { + super(contexts, queryFunction, DataPartitioning.SHARD, taskConcurrency, limit); + this.maxPageSize = maxPageSize; + this.timeSeriesPeriod = timeSeriesPeriod; + } @Override public SourceOperator get(DriverContext driverContext) { @@ -62,11 +71,6 @@ public SourceOperator get(DriverContext driverContext) { return new Impl(driverContext.blockFactory(), sliceQueue, maxPageSize, limit, rounding); } - @Override - public int taskConcurrency() { - return taskConcurrency; - } - @Override public String describe() { return "TimeSeriesSortedSourceOperator[maxPageSize = " + maxPageSize + ", limit = " + limit + "]"; @@ -80,10 +84,14 @@ public static TimeSeriesSortedSourceOperatorFactory create( List searchContexts, Function queryFunction ) { - var weightFunction = LuceneOperator.weightFunction(queryFunction, ScoreMode.COMPLETE_NO_SCORES); - var sliceQueue = LuceneSliceQueue.create(searchContexts, weightFunction, DataPartitioning.SHARD, taskConcurrency); - taskConcurrency = 
Math.min(sliceQueue.totalSlices(), taskConcurrency); - return new TimeSeriesSortedSourceOperatorFactory(limit, maxPageSize, taskConcurrency, timeSeriesPeriod, sliceQueue); + return new TimeSeriesSortedSourceOperatorFactory( + searchContexts, + queryFunction, + taskConcurrency, + maxPageSize, + timeSeriesPeriod, + limit + ); } static final class Impl extends SourceOperator { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactories.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactories.java new file mode 100644 index 0000000000000..bb8d3fd269a8a --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactories.java @@ -0,0 +1,157 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator; + +import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.AggregatorMode; +import org.elasticsearch.compute.aggregation.GroupingAggregator; +import org.elasticsearch.compute.aggregation.blockhash.BlockHash; +import org.elasticsearch.compute.aggregation.blockhash.TimeSeriesBlockHash; +import org.elasticsearch.compute.data.ElementType; + +import java.util.ArrayList; +import java.util.List; + +/** + * This class provides operator factories for time-series aggregations. + * A time-series aggregation executes in three stages, deviating from the typical two-stage aggregation. + * For example: {@code sum(rate(write_requests)), avg(cpu) BY cluster, time-bucket} + * + * 1. Initial Stage: + * In this stage, a standard hash aggregation is executed, grouped by tsid and time-bucket. 
+ * The {@code values} aggregations are added to collect values of the grouping keys excluding the time-bucket, + * which are then used for final result grouping. + * {@code rate[INITIAL](write_requests), avg[INITIAL](cpu), values[SINGLE](cluster) BY tsid, time-bucket} + * + * 2. Intermediate Stage: + * Equivalent to the final mode of a standard hash aggregation. + * This stage merges and reduces the result of the rate aggregations, + * but merges (without reducing) the results of non-rate aggregations. + * {@code rate[FINAL](write_requests), avg[INTERMEDIATE](cpu), values[SINGLE](cluster) BY tsid, time-bucket} + * + * 3. Final Stage: + * This extra stage performs outer aggregations over the rate results + * and combines the intermediate results of non-rate aggregations using the specified user-defined grouping keys. + * {@code sum[SINGLE](rate_result), avg[FINAL](cpu) BY cluster, bucket} + */ +public final class TimeSeriesAggregationOperatorFactories { + + public record Initial( + int tsHashChannel, + int timeBucketChannel, + List groupings, + List rates, + List nonRates, + int maxPageSize + ) implements Operator.OperatorFactory { + @Override + public Operator get(DriverContext driverContext) { + List aggregators = new ArrayList<>(groupings.size() + rates.size() + nonRates.size()); + for (AggregatorFunctionSupplier f : rates) { + aggregators.add(f.groupingAggregatorFactory(AggregatorMode.INITIAL)); + } + for (AggregatorFunctionSupplier f : nonRates) { + aggregators.add(f.groupingAggregatorFactory(AggregatorMode.INITIAL)); + } + aggregators.addAll(valuesAggregatorForGroupings(groupings, timeBucketChannel)); + return new HashAggregationOperator( + aggregators, + () -> new TimeSeriesBlockHash(tsHashChannel, timeBucketChannel, driverContext), + driverContext + ); + } + + @Override + public String describe() { + return "TimeSeriesInitialAggregationOperatorFactory"; + } + } + + public record Intermediate( + int tsHashChannel, + int timeBucketChannel, + List groupings, + 
List rates, + List nonRates, + int maxPageSize + ) implements Operator.OperatorFactory { + @Override + public Operator get(DriverContext driverContext) { + List aggregators = new ArrayList<>(groupings.size() + rates.size() + nonRates.size()); + for (AggregatorFunctionSupplier f : rates) { + aggregators.add(f.groupingAggregatorFactory(AggregatorMode.FINAL)); + } + for (AggregatorFunctionSupplier f : nonRates) { + aggregators.add(f.groupingAggregatorFactory(AggregatorMode.INTERMEDIATE)); + } + aggregators.addAll(valuesAggregatorForGroupings(groupings, timeBucketChannel)); + List hashGroups = List.of( + new BlockHash.GroupSpec(tsHashChannel, ElementType.BYTES_REF), + new BlockHash.GroupSpec(timeBucketChannel, ElementType.LONG) + ); + return new HashAggregationOperator( + aggregators, + () -> BlockHash.build(hashGroups, driverContext.blockFactory(), maxPageSize, false), + driverContext + ); + } + + @Override + public String describe() { + return "TimeSeriesIntermediateAggregationOperatorFactory"; + } + } + + public record Final( + List groupings, + List outerRates, + List nonRates, + int maxPageSize + ) implements Operator.OperatorFactory { + @Override + public Operator get(DriverContext driverContext) { + List aggregators = new ArrayList<>(outerRates.size() + nonRates.size()); + for (AggregatorFunctionSupplier f : outerRates) { + aggregators.add(f.groupingAggregatorFactory(AggregatorMode.SINGLE)); + } + for (AggregatorFunctionSupplier f : nonRates) { + aggregators.add(f.groupingAggregatorFactory(AggregatorMode.FINAL)); + } + return new HashAggregationOperator( + aggregators, + () -> BlockHash.build(groupings, driverContext.blockFactory(), maxPageSize, false), + driverContext + ); + } + + @Override + public String describe() { + return "TimeSeriesFinalAggregationOperatorFactory"; + } + } + + static List valuesAggregatorForGroupings(List groupings, int timeBucketChannel) { + List aggregators = new ArrayList<>(); + for (BlockHash.GroupSpec g : groupings) { + if 
(g.channel() != timeBucketChannel) { + final List channels = List.of(g.channel()); + // TODO: perhaps introduce a specialized aggregator for this? + var aggregatorSupplier = (switch (g.elementType()) { + case BYTES_REF -> new org.elasticsearch.compute.aggregation.ValuesBytesRefAggregatorFunctionSupplier(channels); + case DOUBLE -> new org.elasticsearch.compute.aggregation.ValuesDoubleAggregatorFunctionSupplier(channels); + case INT -> new org.elasticsearch.compute.aggregation.ValuesIntAggregatorFunctionSupplier(channels); + case LONG -> new org.elasticsearch.compute.aggregation.ValuesLongAggregatorFunctionSupplier(channels); + case BOOLEAN -> new org.elasticsearch.compute.aggregation.ValuesBooleanAggregatorFunctionSupplier(channels); + case NULL, DOC, UNKNOWN -> throw new IllegalArgumentException("unsupported grouping type"); + }); + aggregators.add(aggregatorSupplier.groupingAggregatorFactory(AggregatorMode.SINGLE)); + } + } + return aggregators; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactory.java deleted file mode 100644 index 0cf0854a9b0c7..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactory.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.compute.operator; - -import org.elasticsearch.compute.aggregation.AggregatorMode; -import org.elasticsearch.compute.aggregation.GroupingAggregator; -import org.elasticsearch.compute.aggregation.blockhash.BlockHash; -import org.elasticsearch.compute.aggregation.blockhash.TimeSeriesBlockHash; -import org.elasticsearch.core.TimeValue; - -import java.util.List; - -public record TimeSeriesAggregationOperatorFactory( - AggregatorMode mode, - int tsHashChannel, - int timestampIntervalChannel, - TimeValue timeSeriesPeriod, - List aggregators, - int maxPageSize -) implements Operator.OperatorFactory { - - @Override - public String describe() { - return "TimeSeriesAggregationOperator[mode=" - + mode - + ", tsHashChannel = " - + tsHashChannel - + ", timestampIntervalChannel = " - + timestampIntervalChannel - + ", timeSeriesPeriod = " - + timeSeriesPeriod - + ", maxPageSize = " - + maxPageSize - + "]"; - } - - @Override - public Operator get(DriverContext driverContext) { - BlockHash blockHash = new TimeSeriesBlockHash(tsHashChannel, timestampIntervalChannel, driverContext); - return new HashAggregationOperator(aggregators, () -> blockHash, driverContext); - } - -} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java index f1698ea401d28..adce8d8a88407 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java @@ -10,6 +10,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.RefCountingListener; import 
org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.compute.data.Page; @@ -17,6 +18,7 @@ import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.transport.TransportException; +import java.util.List; import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; @@ -89,6 +91,20 @@ public int bufferSize() { } } + public void addCompletionListener(ActionListener listener) { + buffer.addCompletionListener(ActionListener.running(() -> { + try (RefCountingListener refs = new RefCountingListener(listener)) { + for (PendingInstances pending : List.of(outstandingSinks, outstandingSources)) { + // Create an outstanding instance and then finish to complete the completionListener + // if we haven't registered any instances of exchange sinks or exchange sources before. + pending.trackNewInstance(); + pending.completion.addListener(refs.acquire()); + pending.finishInstance(); + } + } + })); + } + /** * Create a new {@link ExchangeSource} for exchanging data * @@ -253,10 +269,10 @@ public Releasable addEmptySink() { private static class PendingInstances { private final AtomicInteger instances = new AtomicInteger(); - private final Releasable onComplete; + private final SubscribableListener completion = new SubscribableListener<>(); - PendingInstances(Releasable onComplete) { - this.onComplete = onComplete; + PendingInstances(Runnable onComplete) { + completion.addListener(ActionListener.running(onComplete)); } void trackNewInstance() { @@ -268,7 +284,7 @@ void finishInstance() { int refs = instances.decrementAndGet(); assert refs >= 0; if (refs == 0) { - onComplete.close(); + completion.onResponse(null); } } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java index 3d80e560cc4d2..017d4c7065bed 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java @@ -35,6 +35,7 @@ import java.util.BitSet; import java.util.List; import java.util.function.BiConsumer; +import java.util.function.Consumer; import java.util.function.Supplier; import java.util.stream.IntStream; import java.util.stream.LongStream; @@ -283,8 +284,19 @@ public void testConstantIntBlock() { positions(blockFactory, 1, 2, new int[] { 1, 2 }), List.of(List.of(value), List.of(value), List.of(value, value)) ); + assertLookup( + block, + positions(blockFactory, 1, 2), + List.of(List.of(value), List.of(value)), + b -> assertThat(b.asVector(), instanceOf(ConstantIntVector.class)) + ); } - assertLookup(block, positions(blockFactory, positionCount + 1000), singletonList(null)); + assertLookup( + block, + positions(blockFactory, positionCount + 1000), + singletonList(null), + b -> assertThat(b, instanceOf(ConstantNullBlock.class)) + ); assertEmptyLookup(blockFactory, block); assertThat(block.asVector().min(), equalTo(value)); assertThat(block.asVector().max(), equalTo(value)); @@ -365,8 +377,19 @@ public void testConstantLongBlock() { positions(blockFactory, 1, 2, new int[] { 1, 2 }), List.of(List.of(value), List.of(value), List.of(value, value)) ); + assertLookup( + block, + positions(blockFactory, 1, 2), + List.of(List.of(value), List.of(value)), + b -> assertThat(b.asVector(), instanceOf(ConstantLongVector.class)) + ); } - assertLookup(block, positions(blockFactory, positionCount + 1000), singletonList(null)); + assertLookup( + block, + positions(blockFactory, positionCount + 1000), + singletonList(null), + b -> assertThat(b, instanceOf(ConstantNullBlock.class)) + ); assertEmptyLookup(blockFactory, block); 
releaseAndAssertBreaker(block); } @@ -447,8 +470,19 @@ public void testConstantDoubleBlock() { positions(blockFactory, 1, 2, new int[] { 1, 2 }), List.of(List.of(value), List.of(value), List.of(value, value)) ); + assertLookup( + block, + positions(blockFactory, 1, 2), + List.of(List.of(value), List.of(value)), + b -> assertThat(b.asVector(), instanceOf(ConstantDoubleVector.class)) + ); } - assertLookup(block, positions(blockFactory, positionCount + 1000), singletonList(null)); + assertLookup( + block, + positions(blockFactory, positionCount + 1000), + singletonList(null), + b -> assertThat(b, instanceOf(ConstantNullBlock.class)) + ); assertEmptyLookup(blockFactory, block); releaseAndAssertBreaker(block); } @@ -605,8 +639,19 @@ public void testConstantBytesRefBlock() { positions(blockFactory, 1, 2, new int[] { 1, 2 }), List.of(List.of(value), List.of(value), List.of(value, value)) ); + assertLookup( + block, + positions(blockFactory, 1, 2), + List.of(List.of(value), List.of(value)), + b -> assertThat(b.asVector(), instanceOf(ConstantBytesRefVector.class)) + ); } - assertLookup(block, positions(blockFactory, positionCount + 1000), singletonList(null)); + assertLookup( + block, + positions(blockFactory, positionCount + 1000), + singletonList(null), + b -> assertThat(b, instanceOf(ConstantNullBlock.class)) + ); assertEmptyLookup(blockFactory, block); releaseAndAssertBreaker(block); } @@ -689,8 +734,19 @@ public void testConstantBooleanBlock() { positions(blockFactory, 1, 2, new int[] { 1, 2 }), List.of(List.of(value), List.of(value), List.of(value, value)) ); + assertLookup( + block, + positions(blockFactory, 1, 2), + List.of(List.of(value), List.of(value)), + b -> assertThat(b.asVector(), instanceOf(ConstantBooleanVector.class)) + ); } - assertLookup(block, positions(blockFactory, positionCount + 1000), singletonList(null)); + assertLookup( + block, + positions(blockFactory, positionCount + 1000), + singletonList(null), + b -> assertThat(b, 
instanceOf(ConstantNullBlock.class)) + ); assertEmptyLookup(blockFactory, block); releaseAndAssertBreaker(block); } @@ -716,6 +772,24 @@ public void testConstantNullBlock() { assertThat(positionCount, is(block.getPositionCount())); assertThat(block.getPositionCount(), is(positionCount)); assertThat(block.isNull(randomPosition(positionCount)), is(true)); + if (positionCount > 2) { + List> expected = new ArrayList<>(); + expected.add(null); + expected.add(null); + expected.add(null); + assertLookup( + block, + positions(blockFactory, 1, 2, new int[] { 1, 2 }), + expected, + b -> assertThat(b, instanceOf(ConstantNullBlock.class)) + ); + } + assertLookup( + block, + positions(blockFactory, positionCount + 1000), + singletonList(null), + b -> assertThat(b, instanceOf(ConstantNullBlock.class)) + ); releaseAndAssertBreaker(block); } } @@ -1544,11 +1618,16 @@ static void assertEmptyLookup(BlockFactory blockFactory, Block block) { } static void assertLookup(Block block, IntBlock positions, List> expected) { + assertLookup(block, positions, expected, l -> {}); + } + + static void assertLookup(Block block, IntBlock positions, List> expected, Consumer extra) { try (positions; ReleasableIterator lookup = block.lookup(positions, ByteSizeValue.ofKb(100))) { assertThat(lookup.hasNext(), equalTo(true)); try (Block b = lookup.next()) { assertThat(valuesAtPositions(b, 0, b.getPositionCount()), equalTo(expected)); assertThat(b.blockFactory(), sameInstance(positions.blockFactory())); + extra.accept(b); } assertThat(lookup.hasNext(), equalTo(false)); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockAccountingTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockAccountingTests.java index ae43e3954935d..86bfec5120945 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockAccountingTests.java +++ 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockAccountingTests.java @@ -42,9 +42,8 @@ public class BlockAccountingTests extends ComputeTestCase { public void testBooleanVector() { BlockFactory blockFactory = blockFactory(); Vector empty = blockFactory.newBooleanArrayVector(new boolean[] {}, 0); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - BooleanVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(BooleanVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Vector emptyPlusOne = blockFactory.newBooleanArrayVector(new boolean[] { randomBoolean() }, 1); @@ -62,9 +61,8 @@ public void testBooleanVector() { public void testIntVector() { BlockFactory blockFactory = blockFactory(); Vector empty = blockFactory.newIntArrayVector(new int[] {}, 0); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - IntVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(IntVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Vector emptyPlusOne = blockFactory.newIntArrayVector(new int[] { randomInt() }, 1); @@ -82,9 +80,8 @@ public void testIntVector() { public void testLongVector() { BlockFactory blockFactory = blockFactory(); Vector empty = blockFactory.newLongArrayVector(new long[] {}, 0); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - LongVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + 
RamUsageEstimator.shallowSizeOfInstance(LongVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Vector emptyPlusOne = blockFactory.newLongArrayVector(new long[] { randomLong() }, 1); @@ -103,9 +100,8 @@ public void testLongVector() { public void testDoubleVector() { BlockFactory blockFactory = blockFactory(); Vector empty = blockFactory.newDoubleArrayVector(new double[] {}, 0); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - DoubleVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(DoubleVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Vector emptyPlusOne = blockFactory.newDoubleArrayVector(new double[] { randomDouble() }, 1); @@ -127,9 +123,8 @@ public void testBytesRefVector() { var emptyArray = new BytesRefArray(0, blockFactory.bigArrays()); var arrayWithOne = new BytesRefArray(0, blockFactory.bigArrays()); Vector emptyVector = blockFactory.newBytesRefArrayVector(emptyArray, 0); - long expectedEmptyVectorUsed = RamUsageTester.ramUsed(emptyVector, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - BytesRefVectorBlock.class - ); + long expectedEmptyVectorUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(emptyVector, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(BytesRefVectorBlock.class); assertThat(emptyVector.ramBytesUsed(), is(expectedEmptyVectorUsed)); var bytesRef = new BytesRef(randomAlphaOfLengthBetween(1, 16)); @@ -146,9 +141,8 @@ public void testBytesRefVector() { public void testBooleanBlock() { BlockFactory blockFactory = blockFactory(); Block empty = new BooleanArrayBlock(new boolean[] {}, 0, new int[] { 0 }, null, Block.MvOrdering.UNORDERED, blockFactory); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + 
RamUsageEstimator.shallowSizeOfInstance( - BooleanVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(BooleanVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Block emptyPlusOne = new BooleanArrayBlock( @@ -194,18 +188,16 @@ public void testBooleanBlockWithNullFirstValues() { Block.MvOrdering.UNORDERED, blockFactory() ); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - BooleanVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(BooleanVectorBlock.class); assertThat(empty.ramBytesUsed(), lessThanOrEqualTo(expectedEmptyUsed)); } public void testIntBlock() { BlockFactory blockFactory = blockFactory(); Block empty = new IntArrayBlock(new int[] {}, 0, new int[] { 0 }, null, Block.MvOrdering.UNORDERED, blockFactory); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - IntVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(IntVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Block emptyPlusOne = new IntArrayBlock( @@ -242,18 +234,16 @@ public void testIntBlock() { public void testIntBlockWithNullFirstValues() { BlockFactory blockFactory = blockFactory(); Block empty = new IntArrayBlock(new int[] {}, 0, null, BitSet.valueOf(new byte[] { 1 }), Block.MvOrdering.UNORDERED, blockFactory); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - IntVectorBlock.class - ); + long expectedEmptyUsed = 
Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(IntVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); } public void testLongBlock() { BlockFactory blockFactory = blockFactory(); Block empty = new LongArrayBlock(new long[] {}, 0, new int[] { 0 }, null, Block.MvOrdering.UNORDERED, blockFactory); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - LongVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(LongVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Block emptyPlusOne = new LongArrayBlock( @@ -299,18 +289,16 @@ public void testLongBlockWithNullFirstValues() { Block.MvOrdering.UNORDERED, blockFactory() ); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - LongVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(LongVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); } public void testDoubleBlock() { BlockFactory blockFactory = blockFactory(); Block empty = new DoubleArrayBlock(new double[] {}, 0, new int[] { 0 }, null, Block.MvOrdering.UNORDERED, blockFactory); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - DoubleVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(DoubleVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Block emptyPlusOne = new DoubleArrayBlock( @@ -356,9 
+344,8 @@ public void testDoubleBlockWithNullFirstValues() { Block.MvOrdering.UNORDERED, blockFactory() ); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - DoubleVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(DoubleVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java index 79135b12b2a83..573c960e86b9c 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java @@ -11,65 +11,49 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Randomness; -import org.elasticsearch.compute.aggregation.AggregatorMode; +import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.compute.aggregation.RateLongAggregatorFunctionSupplier; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.aggregation.SumDoubleAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.blockhash.BlockHash; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockUtils; import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.lucene.ValuesSourceReaderOperatorTests; import org.elasticsearch.core.IOUtils; 
import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper; -import org.hamcrest.Matcher; import org.junit.After; import java.io.IOException; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; -import java.util.Map; +import java.util.stream.IntStream; import static org.elasticsearch.compute.lucene.TimeSeriesSortedSourceOperatorTests.createTimeSeriesSourceOperator; import static org.elasticsearch.compute.lucene.TimeSeriesSortedSourceOperatorTests.writeTS; -import static org.elasticsearch.index.mapper.DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER; -import static org.elasticsearch.test.MapMatcher.assertMap; -import static org.elasticsearch.test.MapMatcher.matchesMap; import static org.hamcrest.Matchers.equalTo; -public class TimeSeriesAggregationOperatorTests extends AnyOperatorTestCase { +public class TimeSeriesAggregationOperatorTests extends ComputeTestCase { - private IndexReader reader; - private final Directory directory = newDirectory(); + private IndexReader reader = null; + private Directory directory = null; @After public void cleanup() throws IOException { IOUtils.close(reader, directory); } - @Override - protected Operator.OperatorFactory simple() { - return new TimeSeriesAggregationOperatorFactory(AggregatorMode.FINAL, 0, 1, TimeValue.ZERO, List.of(), 100); + /** + * A {@link DriverContext} with a nonBreakingBigArrays. 
+ */ + protected DriverContext driverContext() { // TODO make this final once all operators support memory tracking + BlockFactory blockFactory = blockFactory(); + return new DriverContext(blockFactory.bigArrays(), blockFactory); } - @Override - protected Matcher expectedDescriptionOfSimple() { - return equalTo( - "TimeSeriesAggregationOperator[mode=FINAL, tsHashChannel = 0, timestampIntervalChannel = 1, " - + "timeSeriesPeriod = 0s, maxPageSize = 100]" - ); - } - - @Override - protected Matcher expectedToStringOfSimple() { - return equalTo( - "HashAggregationOperator[blockHash=TimeSeriesBlockHash{keys=[BytesRefKey[channel=0], " - + "LongKey[channel=1]], entries=-1b}, aggregators=[]]" - ); - } - - public void testBasicRate() { + public void testBasicRate() throws Exception { long[] v1 = { 1, 1, 3, 0, 2, 9, 21, 3, 7, 7, 9, 12 }; long[] t1 = { 1, 5, 11, 20, 21, 59, 88, 91, 92, 97, 99, 112 }; @@ -78,25 +62,51 @@ public void testBasicRate() { long[] v3 = { 0, 1, 0, 1, 1, 4, 2, 2, 2, 2, 3, 5, 5 }; long[] t3 = { 2, 3, 5, 7, 8, 9, 10, 12, 14, 15, 18, 20, 22 }; - List pods = List.of(new Pod("p1", t1, v1), new Pod("p2", t2, v2), new Pod("p3", t3, v3)); - long unit = between(1, 5); - Map actualRates = runRateTest(pods, TimeValue.timeValueMillis(unit), TimeValue.ZERO); - assertThat( - actualRates, - equalTo( - Map.of( - new Group("\u0001\u0003pods\u0002p1", 0), - 35.0 * unit / 111.0, - new Group("\u0001\u0003pods\u0002p2", 0), - 42.0 * unit / 13.0, - new Group("\u0001\u0003pods\u0002p3", 0), - 10.0 * unit / 20.0 - ) - ) + List pods = List.of( + new Pod("p1", "cluster_1", new Interval(2100, t1, v1)), + new Pod("p2", "cluster_1", new Interval(600, t2, v2)), + new Pod("p3", "cluster_2", new Interval(1100, t3, v3)) ); + long unit = between(1, 5); + { + List> actual = runRateTest( + pods, + List.of("cluster"), + TimeValue.timeValueMillis(unit), + TimeValue.timeValueMillis(500) + ); + List> expected = List.of( + List.of(new BytesRef("cluster_1"), 35.0 * unit / 111.0 + 42.0 * unit / 
13.0), + List.of(new BytesRef("cluster_2"), 10.0 * unit / 20.0) + ); + assertThat(actual, equalTo(expected)); + } + { + List> actual = runRateTest(pods, List.of("pod"), TimeValue.timeValueMillis(unit), TimeValue.timeValueMillis(500)); + List> expected = List.of( + List.of(new BytesRef("p1"), 35.0 * unit / 111.0), + List.of(new BytesRef("p2"), 42.0 * unit / 13.0), + List.of(new BytesRef("p3"), 10.0 * unit / 20.0) + ); + assertThat(actual, equalTo(expected)); + } + { + List> actual = runRateTest( + pods, + List.of("cluster", "bucket"), + TimeValue.timeValueMillis(unit), + TimeValue.timeValueMillis(500) + ); + List> expected = List.of( + List.of(new BytesRef("cluster_1"), 2000L, 35.0 * unit / 111.0), + List.of(new BytesRef("cluster_1"), 500L, 42.0 * unit / 13.0), + List.of(new BytesRef("cluster_2"), 1000L, 10.0 * unit / 20.0) + ); + assertThat(actual, equalTo(expected)); + } } - public void testRateWithInterval() { + public void testRateWithInterval() throws Exception { long[] v1 = { 1, 2, 3, 0, 1, 2, 3, 4, 5, 0, 1, 2, 3 }; long[] t1 = { 0, 10_000, 20_000, 30_000, 40_000, 50_000, 60_000, 70_000, 80_000, 90_000, 100_000, 110_000, 120_000 }; @@ -105,59 +115,71 @@ public void testRateWithInterval() { long[] v3 = { 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192 }; long[] t3 = { 0, 10_000, 20_000, 30_000, 40_000, 50_000, 60_000, 70_000, 80_000, 90_000, 100_000, 110_000, 120_000 }; - List pods = List.of(new Pod("p1", t1, v1), new Pod("p2", t2, v2), new Pod("p3", t3, v3)); - Map actualRates = runRateTest(pods, TimeValue.timeValueMillis(1), TimeValue.timeValueMinutes(1)); - assertMap( - actualRates, - matchesMap().entry(new Group("\u0001\u0003pods\u0002p1", 120_000), 0.0D) - .entry(new Group("\u0001\u0003pods\u0002p1", 60_000), 8.0E-5D) - .entry(new Group("\u0001\u0003pods\u0002p1", 0), 8.0E-5D) - .entry(new Group("\u0001\u0003pods\u0002p2", 120_000), 0.0D) - .entry(new Group("\u0001\u0003pods\u0002p2", 60_000), 0.0D) - .entry(new Group("\u0001\u0003pods\u0002p2", 
0), 0.0D) - .entry(new Group("\u0001\u0003pods\u0002p3", 120_000), 0.0D) - .entry(new Group("\u0001\u0003pods\u0002p3", 60_000), 0.07936D) - .entry(new Group("\u0001\u0003pods\u0002p3", 0), 0.00124D) + List pods = List.of( + new Pod("p1", "cluster_1", new Interval(0, t1, v1)), + new Pod("p2", "cluster_2", new Interval(0, t2, v2)), + new Pod("p3", "cluster_2", new Interval(0, t3, v3)) + ); + List> actual = runRateTest( + pods, + List.of("pod", "bucket"), + TimeValue.timeValueMillis(1), + TimeValue.timeValueMinutes(1) + ); + List> expected = List.of( + List.of(new BytesRef("p1"), 120_000L, 0.0D), + List.of(new BytesRef("p1"), 60_000L, 8.0E-5D), + List.of(new BytesRef("p1"), 0L, 8.0E-5D), + List.of(new BytesRef("p2"), 120_000L, 0.0D), + List.of(new BytesRef("p2"), 60_000L, 0.0D), + List.of(new BytesRef("p2"), 0L, 0.0D), + List.of(new BytesRef("p3"), 120_000L, 0.0D), + List.of(new BytesRef("p3"), 60_000L, 0.07936D), + List.of(new BytesRef("p3"), 0L, 0.00124D) + ); } - public void testRandomRate() { + public void testRandomRate() throws Exception { int numPods = between(1, 10); List pods = new ArrayList<>(); - Map expectedRates = new HashMap<>(); TimeValue unit = TimeValue.timeValueSeconds(1); + List> expected = new ArrayList<>(); for (int p = 0; p < numPods; p++) { - int numValues = between(2, 100); - long[] values = new long[numValues]; - long[] times = new long[numValues]; - long t = DEFAULT_DATE_TIME_FORMATTER.parseMillis("2024-01-01T00:00:00Z"); - for (int i = 0; i < numValues; i++) { - values[i] = randomIntBetween(0, 100); - t += TimeValue.timeValueSeconds(between(1, 10)).millis(); - times[i] = t; + int numIntervals = randomIntBetween(1, 3); + Interval[] intervals = new Interval[numIntervals]; + long startTimeInHours = between(10, 100); + String podName = "p" + p; + for (int interval = 0; interval < numIntervals; interval++) { + final long startInterval = TimeValue.timeValueHours(--startTimeInHours).millis(); + int numValues = between(2, 100); + long[] values = new
long[numValues]; + long[] times = new long[numValues]; + long delta = 0; + for (int i = 0; i < numValues; i++) { + values[i] = randomIntBetween(0, 100); + delta += TimeValue.timeValueSeconds(between(1, 10)).millis(); + times[i] = delta; + } + intervals[interval] = new Interval(startInterval, times, values); + if (numValues == 1) { + expected.add(List.of(new BytesRef(podName), startInterval, null)); + } else { + expected.add(List.of(new BytesRef(podName), startInterval, intervals[interval].expectedRate(unit))); + } } - Pod pod = new Pod("p" + p, times, values); + Pod pod = new Pod(podName, "cluster", intervals); pods.add(pod); - if (numValues == 1) { - expectedRates.put(new Group("\u0001\u0003pods\u0002" + pod.name, 0), null); - } else { - expectedRates.put(new Group("\u0001\u0003pods\u0002" + pod.name, 0), pod.expectedRate(unit)); - } } - Map actualRates = runRateTest(pods, unit, TimeValue.ZERO); - assertThat(actualRates, equalTo(expectedRates)); + List> actual = runRateTest(pods, List.of("pod", "bucket"), unit, TimeValue.timeValueHours(1)); + assertThat(actual, equalTo(expected)); } - record Pod(String name, long[] times, long[] values) { - Pod { - assert times.length == values.length : times.length + "!=" + values.length; - } - + record Interval(long offset, long[] times, long[] values) { double expectedRate(TimeValue unit) { double dv = 0; - for (int i = 0; i < values.length - 1; i++) { - if (values[i + 1] < values[i]) { - dv += values[i]; + for (int v = 0; v < values.length - 1; v++) { + if (values[v + 1] < values[v]) { + dv += values[v]; } } dv += (values[values.length - 1] - values[0]); @@ -166,9 +188,13 @@ record Pod(String name, long[] times, long[] values) { } } - Map runRateTest(List pods, TimeValue unit, TimeValue interval) { + record Pod(String name, String cluster, Interval... 
intervals) {} + + List> runRateTest(List pods, List groupings, TimeValue unit, TimeValue bucketInterval) throws IOException { + cleanup(); + directory = newDirectory(); long unitInMillis = unit.millis(); - record Doc(String pod, long timestamp, long requests) { + record Doc(String pod, String cluster, long timestamp, long requests) { } var sourceOperatorFactory = createTimeSeriesSourceOperator( @@ -177,70 +203,114 @@ record Doc(String pod, long timestamp, long requests) { Integer.MAX_VALUE, between(1, 100), randomBoolean(), - interval, + bucketInterval, writer -> { List docs = new ArrayList<>(); for (Pod pod : pods) { - for (int i = 0; i < pod.times.length; i++) { - docs.add(new Doc(pod.name, pod.times[i], pod.values[i])); + for (Interval interval : pod.intervals) { + for (int i = 0; i < interval.times.length; i++) { + docs.add(new Doc(pod.name, pod.cluster, interval.offset + interval.times[i], interval.values[i])); + } } } Randomness.shuffle(docs); for (Doc doc : docs) { - writeTS(writer, doc.timestamp, new Object[] { "pod", doc.pod }, new Object[] { "requests", doc.requests }); + writeTS( + writer, + doc.timestamp, + new Object[] { "pod", doc.pod, "cluster", doc.cluster }, + new Object[] { "requests", doc.requests } + ); } return docs.size(); } ); var ctx = driverContext(); - var aggregators = List.of( - new RateLongAggregatorFunctionSupplier(List.of(4, 2), unitInMillis).groupingAggregatorFactory(AggregatorMode.INITIAL) - ); - Operator initialHash = new TimeSeriesAggregationOperatorFactory( - AggregatorMode.INITIAL, + List extractOperators = new ArrayList<>(); + var rateField = new NumberFieldMapper.NumberFieldType("requests", NumberFieldMapper.NumberType.LONG); + Operator extractRate = (ValuesSourceReaderOperatorTests.factory(reader, rateField, ElementType.LONG).get(ctx)); + extractOperators.add(extractRate); + List nonBucketGroupings = new ArrayList<>(groupings); + nonBucketGroupings.remove("bucket"); + for (String grouping : nonBucketGroupings) { + var 
groupingField = new KeywordFieldMapper.KeywordFieldType(grouping); + extractOperators.add(ValuesSourceReaderOperatorTests.factory(reader, groupingField, ElementType.BYTES_REF).get(ctx)); + } + // _doc, tsid, timestamp, bucket, requests, grouping1, grouping2 + Operator intialAgg = new TimeSeriesAggregationOperatorFactories.Initial( 1, 3, - interval, - aggregators, - randomIntBetween(1, 1000) + IntStream.range(0, nonBucketGroupings.size()).mapToObj(n -> new BlockHash.GroupSpec(5 + n, ElementType.BYTES_REF)).toList(), + List.of(new RateLongAggregatorFunctionSupplier(List.of(4, 2), unitInMillis)), + List.of(), + between(1, 100) ).get(ctx); - aggregators = List.of( - new RateLongAggregatorFunctionSupplier(List.of(2, 3, 4), unitInMillis).groupingAggregatorFactory(AggregatorMode.FINAL) - ); - Operator finalHash = new TimeSeriesAggregationOperatorFactory( - AggregatorMode.FINAL, + // tsid, bucket, rate[0][0],rate[0][1],rate[0][2], grouping1, grouping2 + Operator intermediateAgg = new TimeSeriesAggregationOperatorFactories.Intermediate( 0, 1, - interval, - aggregators, - randomIntBetween(1, 1000) + IntStream.range(0, nonBucketGroupings.size()).mapToObj(n -> new BlockHash.GroupSpec(5 + n, ElementType.BYTES_REF)).toList(), + List.of(new RateLongAggregatorFunctionSupplier(List.of(2, 3, 4), unitInMillis)), + List.of(), + between(1, 100) ).get(ctx); + // tsid, bucket, rate, grouping1, grouping2 + List finalGroups = new ArrayList<>(); + int groupChannel = 3; + for (String grouping : groupings) { + if (grouping.equals("bucket")) { + finalGroups.add(new BlockHash.GroupSpec(1, ElementType.LONG)); + } else { + finalGroups.add(new BlockHash.GroupSpec(groupChannel++, ElementType.BYTES_REF)); + } + } + Operator finalAgg = new TimeSeriesAggregationOperatorFactories.Final( + finalGroups, + List.of(new SumDoubleAggregatorFunctionSupplier(List.of(2))), + List.of(), + between(1, 100) + ).get(ctx); + List results = new ArrayList<>(); - var requestsField = new 
NumberFieldMapper.NumberFieldType("requests", NumberFieldMapper.NumberType.LONG); OperatorTestCase.runDriver( new Driver( ctx, sourceOperatorFactory.get(ctx), - List.of(ValuesSourceReaderOperatorTests.factory(reader, requestsField, ElementType.LONG).get(ctx), initialHash, finalHash), + CollectionUtils.concatLists(extractOperators, List.of(intialAgg, intermediateAgg, finalAgg)), new TestResultPageSinkOperator(results::add), () -> {} ) ); - Map rates = new HashMap<>(); + List> values = new ArrayList<>(); for (Page result : results) { - BytesRefBlock keysBlock = result.getBlock(0); - LongBlock timestampIntervalsBock = result.getBlock(1); - DoubleBlock ratesBlock = result.getBlock(2); - for (int i = 0; i < result.getPositionCount(); i++) { - var key = new Group(keysBlock.getBytesRef(i, new BytesRef()).utf8ToString(), timestampIntervalsBock.getLong(i)); - rates.put(key, ratesBlock.getDouble(i)); + for (int p = 0; p < result.getPositionCount(); p++) { + int blockCount = result.getBlockCount(); + List row = new ArrayList<>(); + for (int b = 0; b < blockCount; b++) { + row.add(BlockUtils.toJavaObject(result.getBlock(b), p)); + } + values.add(row); } result.releaseBlocks(); } - return rates; + values.sort((v1, v2) -> { + for (int i = 0; i < v1.size(); i++) { + if (v1.get(i) instanceof BytesRef b1) { + int cmp = b1.compareTo((BytesRef) v2.get(i)); + if (cmp != 0) { + return cmp; + } + } else if (v1.get(i) instanceof Long b1) { + int cmp = b1.compareTo((Long) v2.get(i)); + if (cmp != 0) { + return -cmp; + } + } + } + return 0; + }); + return values; } - - record Group(String tsidHash, long timestampInterval) {} } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java index bdaa045633dc0..51332b3c8997a 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java @@ -55,6 +55,7 @@ import java.util.Collections; import java.util.List; import java.util.Set; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Function; import java.util.function.Supplier; @@ -94,6 +95,8 @@ public void testBasic() throws Exception { ExchangeSink sink1 = sinkExchanger.createExchangeSink(); ExchangeSink sink2 = sinkExchanger.createExchangeSink(); ExchangeSourceHandler sourceExchanger = new ExchangeSourceHandler(3, threadPool.executor(ESQL_TEST_EXECUTOR)); + PlainActionFuture sourceCompletion = new PlainActionFuture<>(); + sourceExchanger.addCompletionListener(sourceCompletion); ExchangeSource source = sourceExchanger.createExchangeSource(); sourceExchanger.addRemoteSink(sinkExchanger::fetchPageAsync, 1); SubscribableListener waitForReading = source.waitForReading(); @@ -133,7 +136,9 @@ public void testBasic() throws Exception { sink2.finish(); assertTrue(sink2.isFinished()); assertTrue(source.isFinished()); + assertFalse(sourceCompletion.isDone()); source.finish(); + sourceCompletion.actionGet(10, TimeUnit.SECONDS); ESTestCase.terminate(threadPool); for (Page page : pages) { page.releaseBlocks(); @@ -320,7 +325,9 @@ protected void start(Driver driver, ActionListener listener) { public void testConcurrentWithHandlers() { BlockFactory blockFactory = blockFactory(); + PlainActionFuture sourceCompletionFuture = new PlainActionFuture<>(); var sourceExchanger = new ExchangeSourceHandler(randomExchangeBuffer(), threadPool.executor(ESQL_TEST_EXECUTOR)); + sourceExchanger.addCompletionListener(sourceCompletionFuture); List sinkHandlers = new ArrayList<>(); Supplier exchangeSink = () -> { final ExchangeSinkHandler sinkHandler; @@ -336,6 +343,7 @@ public void 
testConcurrentWithHandlers() { final int maxInputSeqNo = rarely() ? -1 : randomIntBetween(0, 50_000); final int maxOutputSeqNo = rarely() ? -1 : randomIntBetween(0, 50_000); runConcurrentTest(maxInputSeqNo, maxOutputSeqNo, sourceExchanger::createExchangeSource, exchangeSink); + sourceCompletionFuture.actionGet(10, TimeUnit.SECONDS); } public void testEarlyTerminate() { @@ -358,7 +366,7 @@ public void testEarlyTerminate() { assertTrue(sink.isFinished()); } - public void testConcurrentWithTransportActions() throws Exception { + public void testConcurrentWithTransportActions() { MockTransportService node0 = newTransportService(); ExchangeService exchange0 = new ExchangeService(Settings.EMPTY, threadPool, ESQL_TEST_EXECUTOR, blockFactory()); exchange0.registerTransportHandler(node0); @@ -371,12 +379,15 @@ public void testConcurrentWithTransportActions() throws Exception { String exchangeId = "exchange"; Task task = new Task(1, "", "", "", null, Collections.emptyMap()); var sourceHandler = new ExchangeSourceHandler(randomExchangeBuffer(), threadPool.executor(ESQL_TEST_EXECUTOR)); + PlainActionFuture sourceCompletionFuture = new PlainActionFuture<>(); + sourceHandler.addCompletionListener(sourceCompletionFuture); ExchangeSinkHandler sinkHandler = exchange1.createSinkHandler(exchangeId, randomExchangeBuffer()); Transport.Connection connection = node0.getConnection(node1.getLocalNode()); sourceHandler.addRemoteSink(exchange0.newRemoteSink(task, exchangeId, node0, connection), randomIntBetween(1, 5)); final int maxInputSeqNo = rarely() ? -1 : randomIntBetween(0, 50_000); final int maxOutputSeqNo = rarely() ? 
-1 : randomIntBetween(0, 50_000); runConcurrentTest(maxInputSeqNo, maxOutputSeqNo, sourceHandler::createExchangeSource, sinkHandler::createExchangeSink); + sourceCompletionFuture.actionGet(10, TimeUnit.SECONDS); } } @@ -427,6 +438,8 @@ public void sendResponse(TransportResponse transportResponse) { String exchangeId = "exchange"; Task task = new Task(1, "", "", "", null, Collections.emptyMap()); var sourceHandler = new ExchangeSourceHandler(randomIntBetween(1, 128), threadPool.executor(ESQL_TEST_EXECUTOR)); + PlainActionFuture sourceCompletionFuture = new PlainActionFuture<>(); + sourceHandler.addCompletionListener(sourceCompletionFuture); ExchangeSinkHandler sinkHandler = exchange1.createSinkHandler(exchangeId, randomIntBetween(1, 128)); Transport.Connection connection = node0.getConnection(node1.getLocalDiscoNode()); sourceHandler.addRemoteSink(exchange0.newRemoteSink(task, exchangeId, node0, connection), randomIntBetween(1, 5)); @@ -438,6 +451,7 @@ public void sendResponse(TransportResponse transportResponse) { assertNotNull(cause); assertThat(cause.getMessage(), equalTo("page is too large")); sinkHandler.onFailure(new RuntimeException(cause)); + sourceCompletionFuture.actionGet(10, TimeUnit.SECONDS); } } diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java index ca084ab26908d..8f13dd53a0d21 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java @@ -67,7 +67,7 @@ public class MultiClusterSpecIT extends EsqlSpecTestCase { public static TestRule clusterRule = RuleChain.outerRule(remoteCluster).around(localCluster); private static TestFeatureService 
remoteFeaturesService; - private static RestClient remoteFeaturesServiceClient; + private static RestClient remoteClusterClient; @ParametersFactory(argumentFormatting = "%2$s.%3$s") public static List readScriptSpec() throws Exception { @@ -95,30 +95,34 @@ public MultiClusterSpecIT(String fileName, String groupName, String testName, In @Override protected void shouldSkipTest(String testName) throws IOException { super.shouldSkipTest(testName); - for (String feature : testCase.requiredFeatures) { - assumeTrue("Test " + testName + " requires " + feature, remoteFeaturesService().clusterHasFeature(feature)); - } + checkCapabilities(remoteClusterClient(), remoteFeaturesService(), testName, testCase); assumeFalse("can't test with _index metadata", hasIndexMetadata(testCase.query)); assumeTrue("Test " + testName + " is skipped on " + Clusters.oldVersion(), isEnabled(testName, Clusters.oldVersion())); } private TestFeatureService remoteFeaturesService() throws IOException { if (remoteFeaturesService == null) { - HttpHost[] remoteHosts = parseClusterHosts(remoteCluster.getHttpAddresses()).toArray(HttpHost[]::new); - remoteFeaturesServiceClient = super.buildClient(restAdminSettings(), remoteHosts); - var remoteNodeVersions = readVersionsFromNodesInfo(remoteFeaturesServiceClient); + var remoteNodeVersions = readVersionsFromNodesInfo(remoteClusterClient()); var semanticNodeVersions = remoteNodeVersions.stream() .map(ESRestTestCase::parseLegacyVersion) .flatMap(Optional::stream) .collect(Collectors.toSet()); - remoteFeaturesService = createTestFeatureService(getClusterStateFeatures(remoteFeaturesServiceClient), semanticNodeVersions); + remoteFeaturesService = createTestFeatureService(getClusterStateFeatures(remoteClusterClient()), semanticNodeVersions); } return remoteFeaturesService; } + private RestClient remoteClusterClient() throws IOException { + if (remoteClusterClient == null) { + HttpHost[] remoteHosts = 
parseClusterHosts(remoteCluster.getHttpAddresses()).toArray(HttpHost[]::new); + remoteClusterClient = super.buildClient(restAdminSettings(), remoteHosts); + } + return remoteClusterClient; + } + @AfterClass public static void closeRemoveFeaturesService() throws IOException { - IOUtils.close(remoteFeaturesServiceClient); + IOUtils.close(remoteClusterClient); } @Override diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java index 4f43e54a82546..072dc5265fe60 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java @@ -126,6 +126,7 @@ public void testDoNotLogWithInfo() throws IOException { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/108367") public void testDoLogWithDebug() throws IOException { try { setLoggingLevel("DEBUG"); diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index 448d39913a8f6..0b653a1d92106 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -7,12 +7,15 @@ package org.elasticsearch.xpack.esql.qa.rest; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; import org.apache.http.HttpEntity; +import org.apache.lucene.tests.util.TimeUnits; import org.elasticsearch.Build; import 
org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.RestClient; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.Point; @@ -21,6 +24,7 @@ import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.TestFeatureService; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.esql.CsvTestUtils; import org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.RequestObjectBuilder; @@ -56,6 +60,8 @@ import static org.elasticsearch.xpack.ql.CsvSpecReader.specParser; import static org.elasticsearch.xpack.ql.TestUtils.classpathResources; +// This test can run very long in serverless configurations +@TimeoutSuite(millis = 30 * TimeUnits.MINUTE) public abstract class EsqlSpecTestCase extends ESRestTestCase { // To avoid referencing the main module, we replicate EsqlFeatures.ASYNC_QUERY.id() here @@ -146,12 +152,41 @@ public final void test() throws Throwable { } protected void shouldSkipTest(String testName) throws IOException { - for (String feature : testCase.requiredFeatures) { - assumeTrue("Test " + testName + " requires " + feature, clusterHasFeature(feature)); - } + checkCapabilities(adminClient(), testFeatureService, testName, testCase); assumeTrue("Test " + testName + " is not enabled", isEnabled(testName, Version.CURRENT)); } + protected static void checkCapabilities(RestClient client, TestFeatureService testFeatureService, String testName, CsvTestCase testCase) + throws IOException { + if (testCase.requiredCapabilities.isEmpty()) { + return; + } + try { + if (clusterHasCapability(client, "POST", "/_query", List.of(), testCase.requiredCapabilities).orElse(false)) { + return; + } + LOGGER.info("capabilities API returned false, we might be in a mixed 
version cluster so falling back to cluster features"); + } catch (ResponseException e) { + if (e.getResponse().getStatusLine().getStatusCode() / 100 == 4) { + /* + * The node we're testing against is too old for the capabilities + * API which means it has to be pretty old. Very old capabilities + * are ALSO present in the features API, so we can check them instead. + * + * It's kind of weird that we check for *any* 400, but that's required + * because old versions of Elasticsearch return 400, not the expected + * 404. + */ + LOGGER.info("capabilities API failed, falling back to cluster features"); + } else { + throw e; + } + } + for (String feature : testCase.requiredCapabilities) { + assumeTrue("Test " + testName + " requires " + feature, testFeatureService.clusterHasFeature("esql." + feature)); + } + } + protected final void doTest() throws Throwable { RequestObjectBuilder builder = new RequestObjectBuilder(randomFrom(XContentType.values())); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog-IT_tests_only.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog.csv-spec similarity index 88% rename from x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog-IT_tests_only.csv-spec rename to x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog.csv-spec index 6ddc9601db4ac..64c4641b2ca01 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog-IT_tests_only.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog.csv-spec @@ -1,4 +1,7 @@ +# Examples that were published in a blog post + 2023-08-08.full-blown-query +required_feature: esql.enrich_load FROM employees | WHERE still_hired == true diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index 721cff076aeaa..8d54288de552d 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ 
b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -621,6 +621,40 @@ dt:datetime |plus_post:datetime |plus_pre:datetime 2100-01-01T01:01:01.001Z |null |null ; +datePlusQuarter +# "quarter" introduced in 8.15 +required_feature: esql.timespan_abbreviations +row dt = to_dt("2100-01-01T01:01:01.000Z") +| eval plusQuarter = dt + 2 quarters +; + +dt:datetime | plusQuarter:datetime +2100-01-01T01:01:01.000Z | 2100-07-01T01:01:01.000Z +; + +datePlusAbbreviatedDurations +# abbreviations introduced in 8.15 +required_feature: esql.timespan_abbreviations +row dt = to_dt("2100-01-01T00:00:00.000Z") +| eval plusDurations = dt + 1 h + 2 min + 2 sec + 1 s + 4 ms +; + +dt:datetime | plusDurations:datetime +2100-01-01T00:00:00.000Z | 2100-01-01T01:02:03.004Z +; + +datePlusAbbreviatedPeriods +# abbreviations introduced in 8.15 +required_feature: esql.timespan_abbreviations +row dt = to_dt("2100-01-01T00:00:00.000Z") +| eval plusDurations = dt + 0 yr + 1y + 2 q + 3 mo + 4 w + 3 d +; + +dt:datetime | plusDurations:datetime +2100-01-01T00:00:00.000Z | 2101-11-01T00:00:00.000Z +; + + dateMinusDuration row dt = to_dt("2100-01-01T01:01:01.001Z") | eval minus = dt - 1 hour - 1 minute - 1 second - 1 milliseconds; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs-IT_tests_only.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs-IT_tests_only.csv-spec deleted file mode 100644 index f4bf2333cae86..0000000000000 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs-IT_tests_only.csv-spec +++ /dev/null @@ -1,67 +0,0 @@ -// This file contains any ESQL snippets from the docs that don't have a home -// anywhere else. The Isle of Misfit Toys. When you need to add new examples -// for the docs you should try to convert an existing test first. Just add -// the comments in whatever file the test already lives in. 
If you have to -// write a new test to make an example in the docs then put it in whatever -// file matches it's "theme" best. Put it next to similar tests. Not here. - -// Also! When Nik originally extracted examples from the docs to make them -// testable he didn't spend a lot of time putting the docs into appropriate -// files. He just made this one. He didn't put his toys away. We'd be better -// off not adding to this strange toy-pile and instead moving things into -// the appropriate files. - -enrich -// tag::enrich[] -ROW language_code = "1" -| ENRICH languages_policy -// end::enrich[] -; - -// tag::enrich-result[] -language_code:keyword | language_name:keyword -1 | English -// end::enrich-result[] -; - - -enrichOn -// tag::enrich_on[] -ROW a = "1" -| ENRICH languages_policy ON a -// end::enrich_on[] -; - -// tag::enrich_on-result[] -a:keyword | language_name:keyword -1 | English -// end::enrich_on-result[] -; - - -enrichWith -// tag::enrich_with[] -ROW a = "1" -| ENRICH languages_policy ON a WITH language_name -// end::enrich_with[] -; - -// tag::enrich_with-result[] -a:keyword | language_name:keyword -1 | English -// end::enrich_with-result[] -; - - -enrichRename -// tag::enrich_rename[] -ROW a = "1" -| ENRICH languages_policy ON a WITH name = language_name -// end::enrich_rename[] -; - -// tag::enrich_rename-result[] -a:keyword | name:keyword -1 | English -// end::enrich_rename-result[] -; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec deleted file mode 100644 index 367fbf044deed..0000000000000 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec +++ /dev/null @@ -1,350 +0,0 @@ -simple -row language_code = "1" -| enrich languages_policy -; - -language_code:keyword | language_name:keyword -1 | English -; - - -enrichOn -from employees | sort emp_no | limit 1 | eval x = 
to_string(languages) | enrich languages_policy on x | keep emp_no, language_name; - -emp_no:integer | language_name:keyword -10001 | French -; - - -enrichOn2 -from employees | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name | sort emp_no | limit 1 ; - -emp_no:integer | language_name:keyword -10001 | French -; - -simpleSortLimit -from employees | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name | sort emp_no | limit 1; - -emp_no:integer | language_name:keyword -10001 | French -; - - -with -from employees | eval x = to_string(languages) | keep emp_no, x | sort emp_no | limit 1 -| enrich languages_policy on x with language_name; - -emp_no:integer | x:keyword | language_name:keyword -10001 | 2 | French -; - - -withAlias -from employees | sort emp_no | limit 3 | eval x = to_string(languages) | keep emp_no, x -| enrich languages_policy on x with lang = language_name; - -emp_no:integer | x:keyword | lang:keyword -10001 | 2 | French -10002 | 5 | null -10003 | 4 | German -; - - -withAliasSort -from employees | eval x = to_string(languages) | keep emp_no, x | sort emp_no | limit 3 -| enrich languages_policy on x with lang = language_name; - -emp_no:integer | x:keyword | lang:keyword -10001 | 2 | French -10002 | 5 | null -10003 | 4 | German -; - - -withAliasOverwriteName#[skip:-8.13.0] -from employees | sort emp_no -| eval x = to_string(languages) | enrich languages_policy on x with emp_no = language_name -| keep emp_no | limit 1 -; - -emp_no:keyword -French -; - - -withAliasAndPlain -from employees | sort emp_no desc | limit 3 | eval x = to_string(languages) | keep emp_no, x -| enrich languages_policy on x with lang = language_name, language_name; - -emp_no:integer | x:keyword | lang:keyword | language_name:keyword -10100 | 4 | German | German -10099 | 2 | French | French -10098 | 4 | German | German -; - - -withTwoAliasesSameProp -from employees | sort emp_no | limit 1 | eval x = 
to_string(languages) | keep emp_no, x -| enrich languages_policy on x with lang = language_name, lang2 = language_name; - -emp_no:integer | x:keyword | lang:keyword | lang2:keyword -10001 | 2 | French | French -; - - -redundantWith -from employees | sort emp_no | limit 1 | eval x = to_string(languages) | keep emp_no, x -| enrich languages_policy on x with language_name, language_name; - -emp_no:integer | x:keyword | language_name:keyword -10001 | 2 | French -; - - -nullInput -from employees | where emp_no == 10017 | keep emp_no, gender -| enrich languages_policy on gender with language_name, language_name; - -emp_no:integer | gender:keyword | language_name:keyword -10017 | null | null -; - - -constantNullInput -from employees | where emp_no == 10020 | eval x = to_string(languages) | keep emp_no, x -| enrich languages_policy on x with language_name, language_name; - -emp_no:integer | x:keyword | language_name:keyword -10020 | null | null -; - - -multipleEnrich -row a = "1", b = "2", c = "10" -| enrich languages_policy on a with a_lang = language_name -| enrich languages_policy on b with b_lang = language_name -| enrich languages_policy on c with c_lang = language_name; - -a:keyword | b:keyword | c:keyword | a_lang:keyword | b_lang:keyword | c_lang:keyword -1 | 2 | 10 | English | French | null -; - - -enrichEval -from employees | eval x = to_string(languages) -| enrich languages_policy on x with lang = language_name -| eval language = concat(x, "-", lang) -| keep emp_no, x, lang, language -| sort emp_no desc | limit 3; - -emp_no:integer | x:keyword | lang:keyword | language:keyword -10100 | 4 | German | 4-German -10099 | 2 | French | 2-French -10098 | 4 | German | 4-German -; - - -multivalue -required_feature: esql.mv_sort -row a = ["1", "2"] | enrich languages_policy on a with a_lang = language_name | eval a_lang = mv_sort(a_lang); - -a:keyword | a_lang:keyword -["1", "2"] | ["English", "French"] -; - - -enrichCidr#[skip:-8.13.99, reason:enrich for cidr added in 
8.14.0] -FROM sample_data -| ENRICH client_cidr_policy ON client_ip WITH env -| EVAL max_env = MV_MAX(env), count_env = MV_COUNT(env) -| KEEP client_ip, count_env, max_env -| SORT client_ip -; - -client_ip:ip | count_env:i | max_env:keyword -172.21.0.5 | 1 | Development -172.21.2.113 | 2 | QA -172.21.2.162 | 2 | QA -172.21.3.15 | 2 | Production -172.21.3.15 | 2 | Production -172.21.3.15 | 2 | Production -172.21.3.15 | 2 | Production -; - - -enrichCidr2#[skip:-8.99.99, reason:ip_range support not added yet] -FROM sample_data -| ENRICH client_cidr_policy ON client_ip WITH env, client_cidr -| KEEP client_ip, env, client_cidr -| SORT client_ip -; - -client_ip:ip | env:keyword | client_cidr:ip_range -172.21.3.15 | [Development, Production] | 172.21.3.0/24 -172.21.3.15 | [Development, Production] | 172.21.3.0/24 -172.21.3.15 | [Development, Production] | 172.21.3.0/24 -172.21.3.15 | [Development, Production] | 172.21.3.0/24 -172.21.0.5 | Development | 172.21.0.0/16 -172.21.2.113 | [Development, QA] | 172.21.2.0/24 -172.21.2.162 | [Development, QA] | 172.21.2.0/24 -; - - -enrichAgesStatsYear#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM employees -| WHERE birth_date > "1960-01-01" -| EVAL birth_year = DATE_EXTRACT("year", birth_date) -| EVAL age = 2022 - birth_year -| ENRICH ages_policy ON age WITH age_group = description -| STATS count=count(age_group) BY age_group, birth_year -| KEEP birth_year, age_group, count -| SORT birth_year DESC -; - -birth_year:long | age_group:keyword | count:long -1965 | Middle-aged | 1 -1964 | Middle-aged | 4 -1963 | Middle-aged | 7 -1962 | Senior | 6 -1961 | Senior | 8 -1960 | Senior | 8 -; - - -enrichAgesStatsAgeGroup#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM employees -| WHERE birth_date IS NOT NULL -| EVAL age = 2022 - DATE_EXTRACT("year", birth_date) -| ENRICH ages_policy ON age WITH age_group = description -| STATS count=count(age_group) BY age_group -| SORT count DESC -; - -count:long | age_group:keyword -78 | 
Senior -12 | Middle-aged -; - - -enrichHeightsStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM employees -| ENRICH heights_policy ON height WITH height_group = description -| STATS count=count(height_group), min=min(height), max=max(height) BY height_group -| KEEP height_group, min, max, count -| SORT min ASC -; - -height_group:k | min:double | max:double | count:long -Very Short | 1.41 | 1.48 | 9 -Short | 1.5 | 1.59 | 20 -Medium Height | 1.61 | 1.79 | 26 -Tall | 1.8 | 1.99 | 25 -Very Tall | 2.0 | 2.1 | 20 -; - - -enrichDecadesStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM employees -| ENRICH decades_policy ON birth_date WITH birth_decade = decade, birth_description = description -| ENRICH decades_policy ON hire_date WITH hire_decade = decade, hire_description = description -| STATS count=count(*) BY birth_decade, hire_decade, birth_description, hire_description -| KEEP birth_decade, hire_decade, birth_description, hire_description, count -| SORT birth_decade DESC, hire_decade DESC -; - -birth_decade:long | hire_decade:l | birth_description:k | hire_description:k | count:long -null | 1990 | null | Nineties Nostalgia | 6 -null | 1980 | null | Radical Eighties | 4 -1960 | 1990 | Swinging Sixties | Nineties Nostalgia | 13 -1960 | 1980 | Swinging Sixties | Radical Eighties | 21 -1950 | 1990 | Nifty Fifties | Nineties Nostalgia | 22 -1950 | 1980 | Nifty Fifties | Radical Eighties | 34 -; - - -spatialEnrichmentKeywordMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM airports -| WHERE abbrev == "CPH" -| ENRICH city_names ON city WITH airport, region, city_boundary -| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) -| KEEP abbrev, city, city_location, country, location, name, airport, region, boundary_wkt_length -; - -abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | airport:text | region:text | boundary_wkt_length:integer -CPH | Copenhagen | POINT(12.5683 
55.6761) | Denmark | POINT(12.6493508684508 55.6285017221528) | Copenhagen | Copenhagen | Københavns Kommune | 265 -; - - -spatialEnrichmentGeoMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM airports -| WHERE abbrev == "CPH" -| ENRICH city_boundaries ON city_location WITH airport, region, city_boundary -| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) -| KEEP abbrev, city, city_location, country, location, name, airport, region, boundary_wkt_length -; - -abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | airport:text | region:text | boundary_wkt_length:integer -CPH | Copenhagen | POINT(12.5683 55.6761) | Denmark | POINT(12.6493508684508 55.6285017221528) | Copenhagen | Copenhagen | Københavns Kommune | 265 -; - - -spatialEnrichmentGeoMatchStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -required_feature: esql.mv_warn - -FROM airports -| ENRICH city_boundaries ON city_location WITH airport, region, city_boundary -| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) -| STATS city_centroid = ST_CENTROID_AGG(city_location), count = COUNT(city_location), min_wkt = MIN(boundary_wkt_length), max_wkt = MAX(boundary_wkt_length) -; -warning:Line 3:30: evaluation of [LENGTH(TO_STRING(city_boundary))] failed, treating result as null. Only first 20 failures recorded. 
-warning:Line 3:30: java.lang.IllegalArgumentException: single-value function encountered multi-value - -city_centroid:geo_point | count:long | min_wkt:integer | max_wkt:integer -POINT(1.396561 24.127649) | 872 | 88 | 1044 -; - - -spatialEnrichmentKeywordMatchAndSpatialPredicate#[skip:-8.13.99, reason:st_intersects added in 8.14] -FROM airports -| ENRICH city_names ON city WITH airport, region, city_boundary -| MV_EXPAND city_boundary -| EVAL airport_in_city = ST_INTERSECTS(location, city_boundary) -| STATS count=COUNT(*) BY airport_in_city -| SORT count ASC -; - -count:long | airport_in_city:boolean -114 | null -396 | true -455 | false -; - - -spatialEnrichmentKeywordMatchAndSpatialAggregation#[skip:-8.13.99, reason:st_intersects added in 8.14] -FROM airports -| ENRICH city_names ON city WITH airport, region, city_boundary -| MV_EXPAND city_boundary -| EVAL airport_in_city = ST_INTERSECTS(location, city_boundary) -| STATS count=COUNT(*), centroid=ST_CENTROID_AGG(location) BY airport_in_city -| SORT count ASC -; - -count:long | centroid:geo_point | airport_in_city:boolean -114 | POINT (-24.750062 31.575549) | null -396 | POINT (-2.534797 20.667712) | true -455 | POINT (3.090752 27.676442) | false -; - - -spatialEnrichmentTextMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM airports -| WHERE abbrev == "IDR" -| ENRICH city_airports ON name WITH city_name = city, region, city_boundary -| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) -| KEEP abbrev, city_name, city_location, country, location, name, name, region, boundary_wkt_length -; - -abbrev:k | city_name:k | city_location:geo_point | country:k | location:geo_point | name:text | region:text | boundary_wkt_length:i -IDR | Indore | POINT(75.8472 22.7167) | India | POINT(75.8092915005895 22.727749187571) | Devi Ahilyabai Holkar Int'l | Indore City | 231 -; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec 
b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec index f5847260bbb16..f044989ec9cce 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec @@ -1,10 +1,10 @@ -simple +simpleNoLoad from employees | eval x = 1, y = to_string(languages) | enrich languages_policy on y | where x > 1 | keep emp_no, language_name | limit 1; emp_no:integer | language_name:keyword ; -docsGettingStartedEnrich +docsGettingStartedEnrichNoLoad // tag::gs-enrich[] FROM sample_data | KEEP @timestamp, client_ip, event_duration @@ -30,3 +30,458 @@ FROM sample_data median_duration:double | env:keyword ; + +simple +required_feature: esql.enrich_load + +// tag::enrich[] +ROW language_code = "1" +| ENRICH languages_policy +// end::enrich[] +; + +// tag::enrich-result[] +language_code:keyword | language_name:keyword +1 | English +// end::enrich-result[] +; + +enrichOnSimple +required_feature: esql.enrich_load + +// tag::enrich_on[] +ROW a = "1" +| ENRICH languages_policy ON a +// end::enrich_on[] +; + +// tag::enrich_on-result[] +a:keyword | language_name:keyword +1 | English +// end::enrich_on-result[] +; + + +enrichOn +required_feature: esql.enrich_load + +from employees | sort emp_no | limit 1 | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name; + +emp_no:integer | language_name:keyword +10001 | French +; + + +enrichOn2 +required_feature: esql.enrich_load + +from employees | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name | sort emp_no | limit 1 ; + +emp_no:integer | language_name:keyword +10001 | French +; + + +simpleSortLimit +required_feature: esql.enrich_load + +from employees | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name | sort emp_no | limit 1; + +emp_no:integer | language_name:keyword +10001 | French +; + +with +required_feature: 
esql.enrich_load + +from employees | eval x = to_string(languages) | keep emp_no, x | sort emp_no | limit 1 +| enrich languages_policy on x with language_name; + +emp_no:integer | x:keyword | language_name:keyword +10001 | 2 | French +; + + +withSimple +required_feature: esql.enrich_load + +// tag::enrich_with[] +ROW a = "1" +| ENRICH languages_policy ON a WITH language_name +// end::enrich_with[] +; + +// tag::enrich_with-result[] +a:keyword | language_name:keyword +1 | English +// end::enrich_with-result[] +; + + +withAlias +required_feature: esql.enrich_load + +from employees | sort emp_no | limit 3 | eval x = to_string(languages) | keep emp_no, x +| enrich languages_policy on x with lang = language_name; + +emp_no:integer | x:keyword | lang:keyword +10001 | 2 | French +10002 | 5 | null +10003 | 4 | German +; + +withAliasSimple +required_feature: esql.enrich_load + +// tag::enrich_rename[] +ROW a = "1" +| ENRICH languages_policy ON a WITH name = language_name +// end::enrich_rename[] +; + +// tag::enrich_rename-result[] +a:keyword | name:keyword +1 | English +// end::enrich_rename-result[] +; + + +withAliasSort +required_feature: esql.enrich_load + +from employees | eval x = to_string(languages) | keep emp_no, x | sort emp_no | limit 3 +| enrich languages_policy on x with lang = language_name; + +emp_no:integer | x:keyword | lang:keyword +10001 | 2 | French +10002 | 5 | null +10003 | 4 | German +; + + +withAliasOverwriteName#[skip:-8.13.0] +required_feature: esql.enrich_load + +from employees | sort emp_no +| eval x = to_string(languages) | enrich languages_policy on x with emp_no = language_name +| keep emp_no | limit 1 +; + +emp_no:keyword +French +; + +withAliasAndPlain +required_feature: esql.enrich_load + +from employees | sort emp_no desc | limit 3 | eval x = to_string(languages) | keep emp_no, x +| enrich languages_policy on x with lang = language_name, language_name; + +emp_no:integer | x:keyword | lang:keyword | language_name:keyword +10100 | 4 | German 
| German +10099 | 2 | French | French +10098 | 4 | German | German +; + + +withTwoAliasesSameProp +required_feature: esql.enrich_load + +from employees | sort emp_no | limit 1 | eval x = to_string(languages) | keep emp_no, x +| enrich languages_policy on x with lang = language_name, lang2 = language_name; + +emp_no:integer | x:keyword | lang:keyword | lang2:keyword +10001 | 2 | French | French +; + + +redundantWith +required_feature: esql.enrich_load + +from employees | sort emp_no | limit 1 | eval x = to_string(languages) | keep emp_no, x +| enrich languages_policy on x with language_name, language_name; + +emp_no:integer | x:keyword | language_name:keyword +10001 | 2 | French +; + + +nullInput +required_feature: esql.enrich_load + +from employees | where emp_no == 10017 | keep emp_no, gender +| enrich languages_policy on gender with language_name, language_name; + +emp_no:integer | gender:keyword | language_name:keyword +10017 | null | null +; + + +constantNullInput +required_feature: esql.enrich_load + +from employees | where emp_no == 10020 | eval x = to_string(languages) | keep emp_no, x +| enrich languages_policy on x with language_name, language_name; + +emp_no:integer | x:keyword | language_name:keyword +10020 | null | null +; + + +multipleEnrich +required_feature: esql.enrich_load + +row a = "1", b = "2", c = "10" +| enrich languages_policy on a with a_lang = language_name +| enrich languages_policy on b with b_lang = language_name +| enrich languages_policy on c with c_lang = language_name; + +a:keyword | b:keyword | c:keyword | a_lang:keyword | b_lang:keyword | c_lang:keyword +1 | 2 | 10 | English | French | null +; + + +enrichEval +required_feature: esql.enrich_load + +from employees | eval x = to_string(languages) +| enrich languages_policy on x with lang = language_name +| eval language = concat(x, "-", lang) +| keep emp_no, x, lang, language +| sort emp_no desc | limit 3; + +emp_no:integer | x:keyword | lang:keyword | language:keyword +10100 | 4 | 
German | 4-German +10099 | 2 | French | 2-French +10098 | 4 | German | 4-German +; + + +multivalue +required_feature: esql.enrich_load +required_feature: esql.mv_sort + +row a = ["1", "2"] | enrich languages_policy on a with a_lang = language_name | eval a_lang = mv_sort(a_lang); + +a:keyword | a_lang:keyword +["1", "2"] | ["English", "French"] +; + + +enrichCidr#[skip:-8.13.99, reason:enrich for cidr added in 8.14.0] +required_feature: esql.enrich_load + +FROM sample_data +| ENRICH client_cidr_policy ON client_ip WITH env +| EVAL max_env = MV_MAX(env), count_env = MV_COUNT(env) +| KEEP client_ip, count_env, max_env +| SORT client_ip +; + +client_ip:ip | count_env:i | max_env:keyword +172.21.0.5 | 1 | Development +172.21.2.113 | 2 | QA +172.21.2.162 | 2 | QA +172.21.3.15 | 2 | Production +172.21.3.15 | 2 | Production +172.21.3.15 | 2 | Production +172.21.3.15 | 2 | Production +; + + +enrichCidr2#[skip:-8.99.99, reason:ip_range support not added yet] +required_feature: esql.enrich_load + +FROM sample_data +| ENRICH client_cidr_policy ON client_ip WITH env, client_cidr +| KEEP client_ip, env, client_cidr +| SORT client_ip +; + +client_ip:ip | env:keyword | client_cidr:ip_range +172.21.3.15 | [Development, Production] | 172.21.3.0/24 +172.21.3.15 | [Development, Production] | 172.21.3.0/24 +172.21.3.15 | [Development, Production] | 172.21.3.0/24 +172.21.3.15 | [Development, Production] | 172.21.3.0/24 +172.21.0.5 | Development | 172.21.0.0/16 +172.21.2.113 | [Development, QA] | 172.21.2.0/24 +172.21.2.162 | [Development, QA] | 172.21.2.0/24 +; + + +enrichAgesStatsYear#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_feature: esql.enrich_load + +FROM employees +| WHERE birth_date > "1960-01-01" +| EVAL birth_year = DATE_EXTRACT("year", birth_date) +| EVAL age = 2022 - birth_year +| ENRICH ages_policy ON age WITH age_group = description +| STATS count=count(age_group) BY age_group, birth_year +| KEEP birth_year, age_group, count +| SORT birth_year DESC +; + 
+birth_year:long | age_group:keyword | count:long +1965 | Middle-aged | 1 +1964 | Middle-aged | 4 +1963 | Middle-aged | 7 +1962 | Senior | 6 +1961 | Senior | 8 +1960 | Senior | 8 +; + + +enrichAgesStatsAgeGroup#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_feature: esql.enrich_load + +FROM employees +| WHERE birth_date IS NOT NULL +| EVAL age = 2022 - DATE_EXTRACT("year", birth_date) +| ENRICH ages_policy ON age WITH age_group = description +| STATS count=count(age_group) BY age_group +| SORT count DESC +; + +count:long | age_group:keyword +78 | Senior +12 | Middle-aged +; + + +enrichHeightsStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_feature: esql.enrich_load + +FROM employees +| ENRICH heights_policy ON height WITH height_group = description +| STATS count=count(height_group), min=min(height), max=max(height) BY height_group +| KEEP height_group, min, max, count +| SORT min ASC +; + +height_group:k | min:double | max:double | count:long +Very Short | 1.41 | 1.48 | 9 +Short | 1.5 | 1.59 | 20 +Medium Height | 1.61 | 1.79 | 26 +Tall | 1.8 | 1.99 | 25 +Very Tall | 2.0 | 2.1 | 20 +; + + +enrichDecadesStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_feature: esql.enrich_load + +FROM employees +| ENRICH decades_policy ON birth_date WITH birth_decade = decade, birth_description = description +| ENRICH decades_policy ON hire_date WITH hire_decade = decade, hire_description = description +| STATS count=count(*) BY birth_decade, hire_decade, birth_description, hire_description +| KEEP birth_decade, hire_decade, birth_description, hire_description, count +| SORT birth_decade DESC, hire_decade DESC +; + +birth_decade:long | hire_decade:l | birth_description:k | hire_description:k | count:long +null | 1990 | null | Nineties Nostalgia | 6 +null | 1980 | null | Radical Eighties | 4 +1960 | 1990 | Swinging Sixties | Nineties Nostalgia | 13 +1960 | 1980 | Swinging Sixties | Radical Eighties | 21 +1950 | 1990 | Nifty Fifties | 
Nineties Nostalgia | 22 +1950 | 1980 | Nifty Fifties | Radical Eighties | 34 +; + + +spatialEnrichmentKeywordMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_feature: esql.enrich_load + +FROM airports +| WHERE abbrev == "CPH" +| ENRICH city_names ON city WITH airport, region, city_boundary +| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) +| KEEP abbrev, city, city_location, country, location, name, airport, region, boundary_wkt_length +; + +abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | airport:text | region:text | boundary_wkt_length:integer +CPH | Copenhagen | POINT(12.5683 55.6761) | Denmark | POINT(12.6493508684508 55.6285017221528) | Copenhagen | Copenhagen | Københavns Kommune | 265 +; + + +spatialEnrichmentGeoMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_feature: esql.enrich_load + +FROM airports +| WHERE abbrev == "CPH" +| ENRICH city_boundaries ON city_location WITH airport, region, city_boundary +| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) +| KEEP abbrev, city, city_location, country, location, name, airport, region, boundary_wkt_length +; + +abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | airport:text | region:text | boundary_wkt_length:integer +CPH | Copenhagen | POINT(12.5683 55.6761) | Denmark | POINT(12.6493508684508 55.6285017221528) | Copenhagen | Copenhagen | Københavns Kommune | 265 +; + + +spatialEnrichmentGeoMatchStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_feature: esql.enrich_load +required_feature: esql.mv_warn + +FROM airports +| ENRICH city_boundaries ON city_location WITH airport, region, city_boundary +| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) +| STATS city_centroid = ST_CENTROID_AGG(city_location), count = COUNT(city_location), min_wkt = MIN(boundary_wkt_length), max_wkt = MAX(boundary_wkt_length) +; 
+warning:Line 3:30: evaluation of [LENGTH(TO_STRING(city_boundary))] failed, treating result as null. Only first 20 failures recorded. +warning:Line 3:30: java.lang.IllegalArgumentException: single-value function encountered multi-value + +city_centroid:geo_point | count:long | min_wkt:integer | max_wkt:integer +POINT(1.396561 24.127649) | 872 | 88 | 1044 +; + + +spatialEnrichmentKeywordMatchAndSpatialPredicate#[skip:-8.13.99, reason:st_intersects added in 8.14] +required_feature: esql.enrich_load + +FROM airports +| ENRICH city_names ON city WITH airport, region, city_boundary +| MV_EXPAND city_boundary +| EVAL airport_in_city = ST_INTERSECTS(location, city_boundary) +| STATS count=COUNT(*) BY airport_in_city +| SORT count ASC +; + +count:long | airport_in_city:boolean +114 | null +396 | true +455 | false +; + + +spatialEnrichmentKeywordMatchAndSpatialAggregation#[skip:-8.13.99, reason:st_intersects added in 8.14] +required_feature: esql.enrich_load + +FROM airports +| ENRICH city_names ON city WITH airport, region, city_boundary +| MV_EXPAND city_boundary +| EVAL airport_in_city = ST_INTERSECTS(location, city_boundary) +| STATS count=COUNT(*), centroid=ST_CENTROID_AGG(location) BY airport_in_city +| SORT count ASC +; + +count:long | centroid:geo_point | airport_in_city:boolean +114 | POINT (-24.750062 31.575549) | null +396 | POINT (-2.534797 20.667712) | true +455 | POINT (3.090752 27.676442) | false +; + + +spatialEnrichmentTextMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_feature: esql.enrich_load + +FROM airports +| WHERE abbrev == "IDR" +| ENRICH city_airports ON name WITH city_name = city, region, city_boundary +| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) +| KEEP abbrev, city_name, city_location, country, location, name, name, region, boundary_wkt_length +; + +abbrev:k | city_name:k | city_location:geo_point | country:k | location:geo_point | name:text | region:text | boundary_wkt_length:i +IDR | Indore | POINT(75.8472 
22.7167) | India | POINT(75.8092915005895 22.727749187571) | Devi Ahilyabai Holkar Int'l | Indore City | 231 +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index 1d523640731d7..bd52d3b26b336 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -28,8 +28,8 @@ double e() "boolean ends_with(str:keyword|text, suffix:keyword|text)" "double|integer|long|unsigned_long floor(number:double|integer|long|unsigned_long)" "keyword from_base64(string:keyword|text)" -"integer|long|double|boolean|keyword|text|ip|version greatest(first:integer|long|double|boolean|keyword|text|ip|version, ?rest...:integer|long|double|boolean|keyword|text|ip|version)" -"integer|long|double|boolean|keyword|text|ip|version least(first:integer|long|double|boolean|keyword|text|ip|version, ?rest...:integer|long|double|boolean|keyword|text|ip|version)" +"boolean|double|integer|ip|keyword|long|text|version greatest(first:boolean|double|integer|ip|keyword|long|text|version, ?rest...:boolean|double|integer|ip|keyword|long|text|version)" +"boolean|double|integer|ip|keyword|long|text|version least(first:boolean|double|integer|ip|keyword|long|text|version, ?rest...:boolean|double|integer|ip|keyword|long|text|version)" "keyword left(string:keyword|text, length:integer)" "integer length(string:keyword|text)" "integer locate(string:keyword|text, substring:keyword|text, ?start:integer)" @@ -123,10 +123,10 @@ atan2 |[y_coordinate, x_coordinate] |["double|integer|long|unsign avg |number |"double|integer|long" |[""] bin |[field, buckets, from, to] |["integer|long|double|date", "integer|double|date_period|time_duration", "integer|long|double|date", "integer|long|double|date"] |[Numeric or date expression from which to derive buckets., Target number of buckets., Start of the range. 
Can be a number or a date expressed as a string., End of the range. Can be a number or a date expressed as a string.] bucket |[field, buckets, from, to] |["integer|long|double|date", "integer|double|date_period|time_duration", "integer|long|double|date", "integer|long|double|date"] |[Numeric or date expression from which to derive buckets., Target number of buckets., Start of the range. Can be a number or a date expressed as a string., End of the range. Can be a number or a date expressed as a string.] -case |[condition, trueValue] |[boolean, "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"] |["", ""] +case |[condition, trueValue] |[boolean, "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"] |[A condition., The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches.] ceil |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. cidr_match |[ip, blockX] |[ip, "keyword|text"] |[IP address of type `ip` (both IPv4 and IPv6 are supported)., CIDR block to test the IP against.] -coalesce |first |"boolean|text|integer|keyword|long" |Expression to evaluate +coalesce |first |"boolean|text|integer|keyword|long" |Expression to evaluate. concat |[string1, string2] |["keyword|text", "keyword|text"] |[Strings to concatenate., Strings to concatenate.] cos |angle |"double|integer|long|unsigned_long" |An angle, in radians. If `null`, the function returns `null`. cosh |angle |"double|integer|long|unsigned_long" |An angle, in radians. If `null`, the function returns `null`. @@ -141,8 +141,8 @@ e |null |null ends_with |[str, suffix] |["keyword|text", "keyword|text"] |[String expression. If `null`\, the function returns `null`., String expression. If `null`\, the function returns `null`.] 
floor |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. from_base64 |string |"keyword|text" |A base64 string. -greatest |first |"integer|long|double|boolean|keyword|text|ip|version" |[""] -least |first |"integer|long|double|boolean|keyword|text|ip|version" |[""] +greatest |first |"boolean|double|integer|ip|keyword|long|text|version" |First of the columns to evaluate. +least |first |"boolean|double|integer|ip|keyword|long|text|version" |First of the columns to evaluate. left |[string, length] |["keyword|text", integer] |[The string from which to return a substring., The number of characters to return.] length |string |"keyword|text" |String expression. If `null`, the function returns `null`. locate |[string, substring, start] |["keyword|text", "keyword|text", "integer"] |[An input string, A substring to locate in the input string, The start index] @@ -180,12 +180,12 @@ sinh |angle |"double|integer|long|unsigne split |[string, delim] |["keyword|text", "keyword|text"] |[String expression. If `null`\, the function returns `null`., Delimiter. Only single byte delimiters are currently supported.] sqrt |number |"double|integer|long|unsigned_long" |"Numeric expression. If `null`, the function returns `null`." 
st_centroid_ag|field |"geo_point|cartesian_point" |[""] -st_contains |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] -st_disjoint |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] -st_intersects |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] -st_within |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] -st_x |point |"geo_point|cartesian_point" |[""] -st_y |point |"geo_point|cartesian_point" |[""] +st_contains |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`., Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters.] +st_disjoint |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. 
If `null`\, the function returns `null`., Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters.] +st_intersects |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`., Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters.] +st_within |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`., Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters.] +st_x |point |"geo_point|cartesian_point" |Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`. +st_y |point |"geo_point|cartesian_point" |Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`. starts_with |[str, prefix] |["keyword|text", "keyword|text"] |[String expression. If `null`\, the function returns `null`., String expression. If `null`\, the function returns `null`.] substring |[string, start, length] |["keyword|text", integer, integer] |[String expression. 
If `null`\, the function returns `null`., Start position., Length of the substring from the start position. Optional; if omitted\, all positions after `start` are returned.] sum |number |"double|integer|long" |[""] @@ -237,7 +237,7 @@ atan2 |The {wikipedia}/Atan2[angle] between the positive x-axis and the avg |The average of a numeric field. bin |Creates groups of values - buckets - out of a datetime or numeric input. The size of the buckets can either be provided directly, or chosen based on a recommended count and values range. bucket |Creates groups of values - buckets - out of a datetime or numeric input. The size of the buckets can either be provided directly, or chosen based on a recommended count and values range. -case |Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to true. +case |Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to `true`. If the number of arguments is odd, the last argument is the default value which is returned when no condition matches. If the number of arguments is even, and no condition matches, the function returns `null`. ceil |Round a number up to the nearest integer. cidr_match |Returns true if the provided IP is contained in one of the provided CIDR blocks. coalesce |Returns the first of its arguments that is not null. If all arguments are null, it returns `null`. @@ -255,8 +255,8 @@ e |Returns {wikipedia}/E_(mathematical_constant)[Euler's number]. ends_with |Returns a boolean that indicates whether a keyword string ends with another string. floor |Round a number down to the nearest integer. from_base64 |Decode a base64 string. -greatest |Returns the maximum value from many columns. -least |Returns the minimum value from many columns. +greatest |Returns the maximum value from multiple columns. This is similar to <> except it is intended to run on multiple columns at once. 
+least |Returns the minimum value from multiple columns. This is similar to <> except it is intended to run on multiple columns at once. left |Returns the substring that extracts 'length' chars from 'string' starting from the left. length |Returns the character length of a string. locate |Returns an integer that indicates the position of a keyword substring within another string @@ -294,12 +294,12 @@ sinh |Returns the {wikipedia}/Hyperbolic_functions[hyperbolic sine] of split |Split a single valued string into multiple strings. sqrt |Returns the square root of a number. The input can be any numeric value, the return value is always a double. Square roots of negative numbers and infinites are null. st_centroid_ag|The centroid of a spatial field. -st_contains |Returns whether the first geometry contains the second geometry. -st_disjoint |Returns whether the two geometries or geometry columns are disjoint. -st_intersects |Returns whether the two geometries or geometry columns intersect. -st_within |Returns whether the first geometry is within the second geometry. -st_x |Extracts the x-coordinate from a point geometry. -st_y |Extracts the y-coordinate from a point geometry. +st_contains |Returns whether the first geometry contains the second geometry. This is the inverse of the <> function. +st_disjoint |Returns whether the two geometries or geometry columns are disjoint. This is the inverse of the <> function. In mathematical terms: ST_Disjoint(A, B) ⇔ A ⋂ B = ∅ +st_intersects |Returns true if two geometries intersect. They intersect if they have any point in common, including their interior points (points along lines or within polygons). This is the inverse of the <> function. In mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅ +st_within |Returns whether the first geometry is within the second geometry. This is the inverse of the <> function. +st_x |Extracts the `x` coordinate from the supplied point. 
If the points is of type `geo_point` this is equivalent to extracting the `longitude` value. +st_y |Extracts the `y` coordinate from the supplied point. If the points is of type `geo_point` this is equivalent to extracting the `latitude` value. starts_with |Returns a boolean that indicates whether a keyword string starts with another string. substring |Returns a substring of a string, specified by a start position and an optional length sum |The sum of a numeric field. @@ -370,8 +370,8 @@ e |double ends_with |boolean |[false, false] |false |false floor |"double|integer|long|unsigned_long" |false |false |false from_base64 |keyword |false |false |false -greatest |"integer|long|double|boolean|keyword|text|ip|version" |false |true |false -least |"integer|long|double|boolean|keyword|text|ip|version" |false |true |false +greatest |"boolean|double|integer|ip|keyword|long|text|version" |false |true |false +least |"boolean|double|integer|ip|keyword|long|text|version" |false |true |false left |keyword |[false, false] |false |false length |integer |false |false |false locate |integer |[false, false, true] |false |false diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java index 1bc9bd4766c2e..686fb831aa042 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java @@ -121,7 +121,6 @@ public void testRow() { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107347") public void testFromStatsGroupingAvgWithSort() { testFromStatsGroupingAvgImpl("from test | stats avg(count) by data | sort data | limit 2", "data", "avg(count)"); } diff --git 
a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java index a1cd71da6c63b..d18bf0e23fd29 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java @@ -81,6 +81,7 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase { @Before public void setup() { assumeTrue("requires query pragmas", canUseQueryPragmas()); + nodeLevelReduction = randomBoolean(); READ_DESCRIPTION = """ \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 2147483647] \\_ValuesSourceReaderOperator[fields = [pause_me]] @@ -92,10 +93,10 @@ public void setup() { \\_ProjectOperator[projection = [0]] \\_LimitOperator[limit = 1000] \\_OutputOperator[columns = [sum(pause_me)]]"""; - REDUCE_DESCRIPTION = """ - \\_ExchangeSourceOperator[] - \\_ExchangeSinkOperator"""; - nodeLevelReduction = randomBoolean(); + REDUCE_DESCRIPTION = "\\_ExchangeSourceOperator[]\n" + + (nodeLevelReduction ? 
"\\_AggregationOperator[mode = INTERMEDIATE, aggs = sum of longs]\n" : "") + + "\\_ExchangeSinkOperator"; + } public void testTaskContents() throws Exception { @@ -480,6 +481,37 @@ public void testTaskContentsForLimitQuery() throws Exception { } } + public void testTaskContentsForGroupingStatsQuery() throws Exception { + READ_DESCRIPTION = """ + \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 2147483647] + \\_ValuesSourceReaderOperator[fields = [foo]] + \\_OrdinalsGroupingOperator(aggs = max of longs) + \\_ExchangeSinkOperator""".replace("pageSize()", Integer.toString(pageSize())); + MERGE_DESCRIPTION = """ + \\_ExchangeSourceOperator[] + \\_HashAggregationOperator[mode = , aggs = max of longs] + \\_ProjectOperator[projection = [1, 0]] + \\_LimitOperator[limit = 1000] + \\_OutputOperator[columns = [max(foo), pause_me]]"""; + REDUCE_DESCRIPTION = "\\_ExchangeSourceOperator[]\n" + + (nodeLevelReduction ? "\\_HashAggregationOperator[mode = , aggs = max of longs]\n" : "") + + "\\_ExchangeSinkOperator"; + + ActionFuture response = startEsql("from test | stats max(foo) by pause_me"); + try { + getTasksStarting(); + scriptPermits.release(pageSize()); + getTasksRunning(); + } finally { + scriptPermits.release(numberOfDocs()); + try (EsqlQueryResponse esqlResponse = response.get()) { + var it = Iterators.flatMap(esqlResponse.values(), i -> i); + assertThat(it.next(), equalTo(numberOfDocs() - 1L)); // max of numberOfDocs() generated int values + assertThat(it.next(), equalTo(1L)); // pause_me always emits 1 + } + } + } + @Override protected Collection> nodePlugins() { return CollectionUtils.appendToCopy(super.nodePlugins(), MockTransportService.TestPlugin.class); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlDisruptionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlDisruptionIT.java index 4bbcff44ec740..e005e2143522b 100644 --- 
a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlDisruptionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlDisruptionIT.java @@ -8,12 +8,14 @@ package org.elasticsearch.xpack.esql.action; import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction; import org.elasticsearch.cluster.coordination.Coordinator; import org.elasticsearch.cluster.coordination.FollowersChecker; import org.elasticsearch.cluster.coordination.LeaderChecker; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.compute.operator.exchange.ExchangeService; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.health.node.selection.HealthNode; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.disruption.NetworkDisruption; @@ -91,6 +93,21 @@ private EsqlQueryResponse runQueryWithDisruption(EsqlQueryRequest request) { try { return future.actionGet(2, TimeUnit.MINUTES); } catch (Exception e) { + logger.info( + "running tasks: {}", + client().admin() + .cluster() + .prepareListTasks() + .get() + .getTasks() + .stream() + .filter( + // Skip the tasks we that'd get in the way while debugging + t -> false == t.action().contains(TransportListTasksAction.TYPE.name()) + && false == t.action().contains(HealthNode.TASK_NAME) + ) + .toList() + ); assertTrue("request must be failed or completed after clearing disruption", future.isDone()); ensureBlocksReleased(); logger.info("--> failed to execute esql query with disruption; retrying...", e); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java index 406361438fc42..f82e554623085 100644 --- 
a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java @@ -8,9 +8,17 @@ package org.elasticsearch.xpack.esql.action; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; +import java.util.ArrayList; +import java.util.HashMap; import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; public class TimeSeriesIT extends AbstractEsqlIntegTestCase { @@ -37,6 +45,48 @@ public void testEmpty() { "type=long,time_series_metric=gauge" ) .get(); - run("FROM pods | LIMIT 1").close(); + run("METRICS pods | LIMIT 1").close(); + } + + public void testSimpleMetrics() { + Settings settings = Settings.builder().put("mode", "time_series").putList("routing_path", List.of("pod")).build(); + client().admin() + .indices() + .prepareCreate("pods") + .setSettings(settings) + .setMapping( + "@timestamp", + "type=date", + "pod", + "type=keyword,time_series_dimension=true", + "cpu", + "type=double,time_series_metric=gauge" + ) + .get(); + List pods = List.of("p1", "p2", "p3"); + long startTime = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2024-04-15T00:00:00Z"); + int numDocs = between(10, 10); + Map> cpus = new HashMap<>(); + for (int i = 0; i < numDocs; i++) { + String pod = randomFrom(pods); + int cpu = randomIntBetween(0, 100); + cpus.computeIfAbsent(pod, k -> new ArrayList<>()).add(cpu); + long timestamp = startTime + (1000L * i); + client().prepareIndex("pods").setSource("@timestamp", timestamp, "pod", pod, "cpu", cpu).get(); + } + List sortedGroups = cpus.keySet().stream().sorted().toList(); + client().admin().indices().prepareRefresh("pods").get(); + try (EsqlQueryResponse resp 
= run("METRICS pods load=avg(cpu) BY pod | SORT pod")) { + List> rows = EsqlTestUtils.getValuesList(resp); + assertThat(rows, hasSize(sortedGroups.size())); + for (int i = 0; i < rows.size(); i++) { + List r = rows.get(i); + String pod = (String) r.get(1); + assertThat(pod, equalTo(sortedGroups.get(i))); + List values = cpus.get(pod); + double avg = values.stream().mapToDouble(n -> n).sum() / values.size(); + assertThat((double) r.get(0), equalTo(avg)); + } + } } } diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 index c4a3dc7c56615..9f005db107aef 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 @@ -11,6 +11,7 @@ INLINESTATS : 'inlinestats' -> pushMode(EXPRESSION_MODE); KEEP : 'keep' -> pushMode(PROJECT_MODE); LIMIT : 'limit' -> pushMode(EXPRESSION_MODE); META : 'meta' -> pushMode(META_MODE); +METRICS : 'metrics' -> pushMode(METRICS_MODE); MV_EXPAND : 'mv_expand' -> pushMode(MVEXPAND_MODE); RENAME : 'rename' -> pushMode(RENAME_MODE); ROW : 'row' -> pushMode(EXPRESSION_MODE); @@ -31,6 +32,16 @@ MULTILINE_COMMENT WS : [ \r\n\t]+ -> channel(HIDDEN) ; + +fragment INDEX_UNQUOTED_IDENTIFIER_PART + : ~[=`|,[\]/ \t\r\n] + | '/' ~[*/] // allow single / but not followed by another / or * which would start a comment + ; + +INDEX_UNQUOTED_IDENTIFIER + : INDEX_UNQUOTED_IDENTIFIER_PART+ + ; + // // Explain // @@ -192,17 +203,8 @@ FROM_QUOTED_STRING : QUOTED_STRING -> type(QUOTED_STRING); OPTIONS : 'options'; METADATA : 'metadata'; -fragment FROM_UNQUOTED_IDENTIFIER_PART - : ~[=`|,[\]/ \t\r\n] - | '/' ~[*/] // allow single / but not followed by another / or * which would start a comment - ; - -FROM_UNQUOTED_IDENTIFIER - : FROM_UNQUOTED_IDENTIFIER_PART+ - ; - -FROM_QUOTED_IDENTIFIER - : QUOTED_IDENTIFIER -> type(QUOTED_IDENTIFIER) +FROM_INDEX_UNQUOTED_IDENTIFIER + : INDEX_UNQUOTED_IDENTIFIER -> type(INDEX_UNQUOTED_IDENTIFIER) ; 
FROM_LINE_COMMENT @@ -428,3 +430,60 @@ SETTING_WS : WS -> channel(HIDDEN) ; + +// +// METRICS command +// +mode METRICS_MODE; +METRICS_PIPE : PIPE -> type(PIPE), popMode; + +METRICS_INDEX_UNQUOTED_IDENTIFIER + : INDEX_UNQUOTED_IDENTIFIER -> type(INDEX_UNQUOTED_IDENTIFIER), popMode, pushMode(CLOSING_METRICS_MODE) + ; + +METRICS_LINE_COMMENT + : LINE_COMMENT -> channel(HIDDEN) + ; + +METRICS_MULTILINE_COMMENT + : MULTILINE_COMMENT -> channel(HIDDEN) + ; + +METRICS_WS + : WS -> channel(HIDDEN) + ; + +// TODO: remove this workaround mode - see https://github.com/elastic/elasticsearch/issues/108528 +mode CLOSING_METRICS_MODE; + +CLOSING_METRICS_COMMA + : COMMA -> type(COMMA), popMode, pushMode(METRICS_MODE) + ; + +CLOSING_METRICS_LINE_COMMENT + : LINE_COMMENT -> channel(HIDDEN) + ; + +CLOSING_METRICS_MULTILINE_COMMENT + : MULTILINE_COMMENT -> channel(HIDDEN) + ; + +CLOSING_METRICS_WS + : WS -> channel(HIDDEN) + ; + +CLOSING_METRICS_QUOTED_IDENTIFIER + : QUOTED_IDENTIFIER -> popMode, pushMode(EXPRESSION_MODE), type(QUOTED_IDENTIFIER) + ; + +CLOSING_METRICS_UNQUOTED_IDENTIFIER + :UNQUOTED_IDENTIFIER -> popMode, pushMode(EXPRESSION_MODE), type(UNQUOTED_IDENTIFIER) + ; + +CLOSING_METRICS_BY + :BY -> popMode, pushMode(EXPRESSION_MODE), type(BY) + ; + +CLOSING_METRICS_PIPE + : PIPE -> type(PIPE), popMode + ; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens index b496aa68b61f7..15a8356d1b943 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens @@ -9,105 +9,112 @@ INLINESTATS=8 KEEP=9 LIMIT=10 META=11 -MV_EXPAND=12 -RENAME=13 -ROW=14 -SHOW=15 -SORT=16 -STATS=17 -WHERE=18 -UNKNOWN_CMD=19 -LINE_COMMENT=20 -MULTILINE_COMMENT=21 -WS=22 -EXPLAIN_WS=23 -EXPLAIN_LINE_COMMENT=24 -EXPLAIN_MULTILINE_COMMENT=25 -PIPE=26 -QUOTED_STRING=27 -INTEGER_LITERAL=28 -DECIMAL_LITERAL=29 -BY=30 -AND=31 -ASC=32 -ASSIGN=33 -CAST_OP=34 -COMMA=35 
-DESC=36 -DOT=37 -FALSE=38 -FIRST=39 -LAST=40 -LP=41 -IN=42 -IS=43 -LIKE=44 -NOT=45 -NULL=46 -NULLS=47 -OR=48 -PARAM=49 -RLIKE=50 -RP=51 -TRUE=52 -EQ=53 -CIEQ=54 -NEQ=55 -LT=56 -LTE=57 -GT=58 -GTE=59 -PLUS=60 -MINUS=61 -ASTERISK=62 -SLASH=63 -PERCENT=64 -OPENING_BRACKET=65 -CLOSING_BRACKET=66 -UNQUOTED_IDENTIFIER=67 -QUOTED_IDENTIFIER=68 -EXPR_LINE_COMMENT=69 -EXPR_MULTILINE_COMMENT=70 -EXPR_WS=71 -OPTIONS=72 -METADATA=73 -FROM_UNQUOTED_IDENTIFIER=74 -FROM_LINE_COMMENT=75 -FROM_MULTILINE_COMMENT=76 -FROM_WS=77 -ID_PATTERN=78 -PROJECT_LINE_COMMENT=79 -PROJECT_MULTILINE_COMMENT=80 -PROJECT_WS=81 -AS=82 -RENAME_LINE_COMMENT=83 -RENAME_MULTILINE_COMMENT=84 -RENAME_WS=85 -ON=86 -WITH=87 -ENRICH_POLICY_NAME=88 -ENRICH_LINE_COMMENT=89 -ENRICH_MULTILINE_COMMENT=90 -ENRICH_WS=91 -ENRICH_FIELD_LINE_COMMENT=92 -ENRICH_FIELD_MULTILINE_COMMENT=93 -ENRICH_FIELD_WS=94 -MVEXPAND_LINE_COMMENT=95 -MVEXPAND_MULTILINE_COMMENT=96 -MVEXPAND_WS=97 -INFO=98 -SHOW_LINE_COMMENT=99 -SHOW_MULTILINE_COMMENT=100 -SHOW_WS=101 -FUNCTIONS=102 -META_LINE_COMMENT=103 -META_MULTILINE_COMMENT=104 -META_WS=105 -COLON=106 -SETTING=107 -SETTING_LINE_COMMENT=108 -SETTTING_MULTILINE_COMMENT=109 -SETTING_WS=110 +METRICS=12 +MV_EXPAND=13 +RENAME=14 +ROW=15 +SHOW=16 +SORT=17 +STATS=18 +WHERE=19 +UNKNOWN_CMD=20 +LINE_COMMENT=21 +MULTILINE_COMMENT=22 +WS=23 +INDEX_UNQUOTED_IDENTIFIER=24 +EXPLAIN_WS=25 +EXPLAIN_LINE_COMMENT=26 +EXPLAIN_MULTILINE_COMMENT=27 +PIPE=28 +QUOTED_STRING=29 +INTEGER_LITERAL=30 +DECIMAL_LITERAL=31 +BY=32 +AND=33 +ASC=34 +ASSIGN=35 +CAST_OP=36 +COMMA=37 +DESC=38 +DOT=39 +FALSE=40 +FIRST=41 +LAST=42 +LP=43 +IN=44 +IS=45 +LIKE=46 +NOT=47 +NULL=48 +NULLS=49 +OR=50 +PARAM=51 +RLIKE=52 +RP=53 +TRUE=54 +EQ=55 +CIEQ=56 +NEQ=57 +LT=58 +LTE=59 +GT=60 +GTE=61 +PLUS=62 +MINUS=63 +ASTERISK=64 +SLASH=65 +PERCENT=66 +OPENING_BRACKET=67 +CLOSING_BRACKET=68 +UNQUOTED_IDENTIFIER=69 +QUOTED_IDENTIFIER=70 +EXPR_LINE_COMMENT=71 +EXPR_MULTILINE_COMMENT=72 +EXPR_WS=73 +OPTIONS=74 +METADATA=75 
+FROM_LINE_COMMENT=76 +FROM_MULTILINE_COMMENT=77 +FROM_WS=78 +ID_PATTERN=79 +PROJECT_LINE_COMMENT=80 +PROJECT_MULTILINE_COMMENT=81 +PROJECT_WS=82 +AS=83 +RENAME_LINE_COMMENT=84 +RENAME_MULTILINE_COMMENT=85 +RENAME_WS=86 +ON=87 +WITH=88 +ENRICH_POLICY_NAME=89 +ENRICH_LINE_COMMENT=90 +ENRICH_MULTILINE_COMMENT=91 +ENRICH_WS=92 +ENRICH_FIELD_LINE_COMMENT=93 +ENRICH_FIELD_MULTILINE_COMMENT=94 +ENRICH_FIELD_WS=95 +MVEXPAND_LINE_COMMENT=96 +MVEXPAND_MULTILINE_COMMENT=97 +MVEXPAND_WS=98 +INFO=99 +SHOW_LINE_COMMENT=100 +SHOW_MULTILINE_COMMENT=101 +SHOW_WS=102 +FUNCTIONS=103 +META_LINE_COMMENT=104 +META_MULTILINE_COMMENT=105 +META_WS=106 +COLON=107 +SETTING=108 +SETTING_LINE_COMMENT=109 +SETTTING_MULTILINE_COMMENT=110 +SETTING_WS=111 +METRICS_LINE_COMMENT=112 +METRICS_MULTILINE_COMMENT=113 +METRICS_WS=114 +CLOSING_METRICS_LINE_COMMENT=115 +CLOSING_METRICS_MULTILINE_COMMENT=116 +CLOSING_METRICS_WS=117 'dissect'=1 'drop'=2 'enrich'=3 @@ -119,55 +126,56 @@ SETTING_WS=110 'keep'=9 'limit'=10 'meta'=11 -'mv_expand'=12 -'rename'=13 -'row'=14 -'show'=15 -'sort'=16 -'stats'=17 -'where'=18 -'|'=26 -'by'=30 -'and'=31 -'asc'=32 -'='=33 -'::'=34 -','=35 -'desc'=36 -'.'=37 -'false'=38 -'first'=39 -'last'=40 -'('=41 -'in'=42 -'is'=43 -'like'=44 -'not'=45 -'null'=46 -'nulls'=47 -'or'=48 -'?'=49 -'rlike'=50 -')'=51 -'true'=52 -'=='=53 -'=~'=54 -'!='=55 -'<'=56 -'<='=57 -'>'=58 -'>='=59 -'+'=60 -'-'=61 -'*'=62 -'/'=63 -'%'=64 -']'=66 -'options'=72 -'metadata'=73 -'as'=82 -'on'=86 -'with'=87 -'info'=98 -'functions'=102 -':'=106 +'metrics'=12 +'mv_expand'=13 +'rename'=14 +'row'=15 +'show'=16 +'sort'=17 +'stats'=18 +'where'=19 +'|'=28 +'by'=32 +'and'=33 +'asc'=34 +'='=35 +'::'=36 +','=37 +'desc'=38 +'.'=39 +'false'=40 +'first'=41 +'last'=42 +'('=43 +'in'=44 +'is'=45 +'like'=46 +'not'=47 +'null'=48 +'nulls'=49 +'or'=50 +'?'=51 +'rlike'=52 +')'=53 +'true'=54 +'=='=55 +'=~'=56 +'!='=57 +'<'=58 +'<='=59 +'>'=60 +'>='=61 +'+'=62 +'-'=63 +'*'=64 +'/'=65 +'%'=66 +']'=68 +'options'=74 +'metadata'=75 
+'as'=83 +'on'=87 +'with'=88 +'info'=99 +'functions'=103 +':'=107 diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index 62dcc6ebd484b..e023991b74187 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -23,6 +23,7 @@ sourceCommand : explainCommand | fromCommand | rowCommand + | metricsCommand | showCommand | metaCommand ; @@ -104,12 +105,11 @@ field ; fromCommand - : FROM fromIdentifier (COMMA fromIdentifier)* metadata? fromOptions? + : FROM indexIdentifier (COMMA indexIdentifier)* metadata? fromOptions? ; -fromIdentifier - : FROM_UNQUOTED_IDENTIFIER - | QUOTED_IDENTIFIER +indexIdentifier + : INDEX_UNQUOTED_IDENTIFIER ; fromOptions @@ -126,13 +126,17 @@ metadata ; metadataOption - : METADATA fromIdentifier (COMMA fromIdentifier)* + : METADATA indexIdentifier (COMMA indexIdentifier)* ; deprecated_metadata : OPENING_BRACKET metadataOption CLOSING_BRACKET ; +metricsCommand + : METRICS indexIdentifier (COMMA indexIdentifier)* aggregates=fields? (BY grouping=fields)? 
+ ; + evalCommand : EVAL fields ; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens index b496aa68b61f7..15a8356d1b943 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens @@ -9,105 +9,112 @@ INLINESTATS=8 KEEP=9 LIMIT=10 META=11 -MV_EXPAND=12 -RENAME=13 -ROW=14 -SHOW=15 -SORT=16 -STATS=17 -WHERE=18 -UNKNOWN_CMD=19 -LINE_COMMENT=20 -MULTILINE_COMMENT=21 -WS=22 -EXPLAIN_WS=23 -EXPLAIN_LINE_COMMENT=24 -EXPLAIN_MULTILINE_COMMENT=25 -PIPE=26 -QUOTED_STRING=27 -INTEGER_LITERAL=28 -DECIMAL_LITERAL=29 -BY=30 -AND=31 -ASC=32 -ASSIGN=33 -CAST_OP=34 -COMMA=35 -DESC=36 -DOT=37 -FALSE=38 -FIRST=39 -LAST=40 -LP=41 -IN=42 -IS=43 -LIKE=44 -NOT=45 -NULL=46 -NULLS=47 -OR=48 -PARAM=49 -RLIKE=50 -RP=51 -TRUE=52 -EQ=53 -CIEQ=54 -NEQ=55 -LT=56 -LTE=57 -GT=58 -GTE=59 -PLUS=60 -MINUS=61 -ASTERISK=62 -SLASH=63 -PERCENT=64 -OPENING_BRACKET=65 -CLOSING_BRACKET=66 -UNQUOTED_IDENTIFIER=67 -QUOTED_IDENTIFIER=68 -EXPR_LINE_COMMENT=69 -EXPR_MULTILINE_COMMENT=70 -EXPR_WS=71 -OPTIONS=72 -METADATA=73 -FROM_UNQUOTED_IDENTIFIER=74 -FROM_LINE_COMMENT=75 -FROM_MULTILINE_COMMENT=76 -FROM_WS=77 -ID_PATTERN=78 -PROJECT_LINE_COMMENT=79 -PROJECT_MULTILINE_COMMENT=80 -PROJECT_WS=81 -AS=82 -RENAME_LINE_COMMENT=83 -RENAME_MULTILINE_COMMENT=84 -RENAME_WS=85 -ON=86 -WITH=87 -ENRICH_POLICY_NAME=88 -ENRICH_LINE_COMMENT=89 -ENRICH_MULTILINE_COMMENT=90 -ENRICH_WS=91 -ENRICH_FIELD_LINE_COMMENT=92 -ENRICH_FIELD_MULTILINE_COMMENT=93 -ENRICH_FIELD_WS=94 -MVEXPAND_LINE_COMMENT=95 -MVEXPAND_MULTILINE_COMMENT=96 -MVEXPAND_WS=97 -INFO=98 -SHOW_LINE_COMMENT=99 -SHOW_MULTILINE_COMMENT=100 -SHOW_WS=101 -FUNCTIONS=102 -META_LINE_COMMENT=103 -META_MULTILINE_COMMENT=104 -META_WS=105 -COLON=106 -SETTING=107 -SETTING_LINE_COMMENT=108 -SETTTING_MULTILINE_COMMENT=109 -SETTING_WS=110 +METRICS=12 +MV_EXPAND=13 +RENAME=14 +ROW=15 +SHOW=16 +SORT=17 +STATS=18 +WHERE=19 +UNKNOWN_CMD=20 
+LINE_COMMENT=21 +MULTILINE_COMMENT=22 +WS=23 +INDEX_UNQUOTED_IDENTIFIER=24 +EXPLAIN_WS=25 +EXPLAIN_LINE_COMMENT=26 +EXPLAIN_MULTILINE_COMMENT=27 +PIPE=28 +QUOTED_STRING=29 +INTEGER_LITERAL=30 +DECIMAL_LITERAL=31 +BY=32 +AND=33 +ASC=34 +ASSIGN=35 +CAST_OP=36 +COMMA=37 +DESC=38 +DOT=39 +FALSE=40 +FIRST=41 +LAST=42 +LP=43 +IN=44 +IS=45 +LIKE=46 +NOT=47 +NULL=48 +NULLS=49 +OR=50 +PARAM=51 +RLIKE=52 +RP=53 +TRUE=54 +EQ=55 +CIEQ=56 +NEQ=57 +LT=58 +LTE=59 +GT=60 +GTE=61 +PLUS=62 +MINUS=63 +ASTERISK=64 +SLASH=65 +PERCENT=66 +OPENING_BRACKET=67 +CLOSING_BRACKET=68 +UNQUOTED_IDENTIFIER=69 +QUOTED_IDENTIFIER=70 +EXPR_LINE_COMMENT=71 +EXPR_MULTILINE_COMMENT=72 +EXPR_WS=73 +OPTIONS=74 +METADATA=75 +FROM_LINE_COMMENT=76 +FROM_MULTILINE_COMMENT=77 +FROM_WS=78 +ID_PATTERN=79 +PROJECT_LINE_COMMENT=80 +PROJECT_MULTILINE_COMMENT=81 +PROJECT_WS=82 +AS=83 +RENAME_LINE_COMMENT=84 +RENAME_MULTILINE_COMMENT=85 +RENAME_WS=86 +ON=87 +WITH=88 +ENRICH_POLICY_NAME=89 +ENRICH_LINE_COMMENT=90 +ENRICH_MULTILINE_COMMENT=91 +ENRICH_WS=92 +ENRICH_FIELD_LINE_COMMENT=93 +ENRICH_FIELD_MULTILINE_COMMENT=94 +ENRICH_FIELD_WS=95 +MVEXPAND_LINE_COMMENT=96 +MVEXPAND_MULTILINE_COMMENT=97 +MVEXPAND_WS=98 +INFO=99 +SHOW_LINE_COMMENT=100 +SHOW_MULTILINE_COMMENT=101 +SHOW_WS=102 +FUNCTIONS=103 +META_LINE_COMMENT=104 +META_MULTILINE_COMMENT=105 +META_WS=106 +COLON=107 +SETTING=108 +SETTING_LINE_COMMENT=109 +SETTTING_MULTILINE_COMMENT=110 +SETTING_WS=111 +METRICS_LINE_COMMENT=112 +METRICS_MULTILINE_COMMENT=113 +METRICS_WS=114 +CLOSING_METRICS_LINE_COMMENT=115 +CLOSING_METRICS_MULTILINE_COMMENT=116 +CLOSING_METRICS_WS=117 'dissect'=1 'drop'=2 'enrich'=3 @@ -119,55 +126,56 @@ SETTING_WS=110 'keep'=9 'limit'=10 'meta'=11 -'mv_expand'=12 -'rename'=13 -'row'=14 -'show'=15 -'sort'=16 -'stats'=17 -'where'=18 -'|'=26 -'by'=30 -'and'=31 -'asc'=32 -'='=33 -'::'=34 -','=35 -'desc'=36 -'.'=37 -'false'=38 -'first'=39 -'last'=40 -'('=41 -'in'=42 -'is'=43 -'like'=44 -'not'=45 -'null'=46 -'nulls'=47 -'or'=48 -'?'=49 -'rlike'=50 
-')'=51 -'true'=52 -'=='=53 -'=~'=54 -'!='=55 -'<'=56 -'<='=57 -'>'=58 -'>='=59 -'+'=60 -'-'=61 -'*'=62 -'/'=63 -'%'=64 -']'=66 -'options'=72 -'metadata'=73 -'as'=82 -'on'=86 -'with'=87 -'info'=98 -'functions'=102 -':'=106 +'metrics'=12 +'mv_expand'=13 +'rename'=14 +'row'=15 +'show'=16 +'sort'=17 +'stats'=18 +'where'=19 +'|'=28 +'by'=32 +'and'=33 +'asc'=34 +'='=35 +'::'=36 +','=37 +'desc'=38 +'.'=39 +'false'=40 +'first'=41 +'last'=42 +'('=43 +'in'=44 +'is'=45 +'like'=46 +'not'=47 +'null'=48 +'nulls'=49 +'or'=50 +'?'=51 +'rlike'=52 +')'=53 +'true'=54 +'=='=55 +'=~'=56 +'!='=57 +'<'=58 +'<='=59 +'>'=60 +'>='=61 +'+'=62 +'-'=63 +'*'=64 +'/'=65 +'%'=66 +']'=68 +'options'=74 +'metadata'=75 +'as'=83 +'on'=87 +'with'=88 +'info'=99 +'functions'=103 +':'=107 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java new file mode 100644 index 0000000000000..fa23466f54f83 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.action; + +import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.rest.action.admin.cluster.RestNodesCapabilitiesAction; +import org.elasticsearch.xpack.esql.plugin.EsqlFeatures; + +import java.util.ArrayList; +import java.util.List; +import java.util.Set; + +/** + * A {@link Set} of "capabilities" supported by the {@link RestEsqlQueryAction} + * and {@link RestEsqlAsyncQueryAction} APIs. These are exposed over the + * {@link RestNodesCapabilitiesAction} and we use them to enable tests. 
+ */ +public class EsqlCapabilities { + static final Set CAPABILITIES = capabilities(); + + private static Set capabilities() { + /* + * Add all of our cluster features without the leading "esql." + */ + List caps = new ArrayList<>(); + for (NodeFeature feature : new EsqlFeatures().getFeatures()) { + caps.add(cap(feature)); + } + for (NodeFeature feature : new EsqlFeatures().getHistoricalFeatures().keySet()) { + caps.add(cap(feature)); + } + return Set.copyOf(caps); + } + + /** + * Convert a {@link NodeFeature} from {@link EsqlFeatures} into a + * capability. + */ + public static String cap(NodeFeature feature) { + assert feature.id().startsWith("esql."); + return feature.id().substring("esql.".length()); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java index 49a0307a6599e..fdf39545a396b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java @@ -92,7 +92,7 @@ static EsqlQueryResponse deserialize(BlockStreamInput in) throws IOException { boolean isRunning = false; boolean isAsync = false; Profile profile = null; - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ASYNC_QUERY)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { asyncExecutionId = in.readOptionalString(); isRunning = in.readBoolean(); isAsync = in.readBoolean(); @@ -108,7 +108,7 @@ static EsqlQueryResponse deserialize(BlockStreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ASYNC_QUERY)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeOptionalString(asyncExecutionId); out.writeBoolean(isRunning); 
out.writeBoolean(isAsync); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java index 3f0289d49535a..ad47779fffbb6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java @@ -39,6 +39,11 @@ public List routes() { return List.of(new Route(POST, "/_query/async")); } + @Override + public Set supportedCapabilities() { + return EsqlCapabilities.CAPABILITIES; + } + @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { EsqlQueryRequest esqlRequest; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java index 97a7f8e0e9e7d..268966422ce56 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java @@ -39,6 +39,11 @@ public List routes() { return List.of(new Route(POST, "/_query")); } + @Override + public Set supportedCapabilities() { + return EsqlCapabilities.CAPABILITIES; + } + @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { EsqlQueryRequest esqlRequest; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java index e9a2fb88e1991..84993a96f040d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java @@ -474,7 +474,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(matchType); out.writeString(matchField); out.writeWriteable(inputPage); - PlanStreamOutput planOut = new PlanStreamOutput(out, PlanNameRegistry.INSTANCE); + PlanStreamOutput planOut = new PlanStreamOutput(out, PlanNameRegistry.INSTANCE, null); planOut.writeCollection(extractFields, writerFromPlanWriter(PlanStreamOutput::writeNamedExpression)); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java index f00e69ddaabe4..1018a03762cce 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java @@ -16,6 +16,7 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; @@ -59,12 +60,28 @@ record Condition(Expression condition, Expression value) {} "unsigned_long", "version" }, description = """ - Accepts pairs of conditions and values. - The function returns the value that belongs to the first condition that evaluates to true.""" + Accepts pairs of conditions and values. The function returns the value that + belongs to the first condition that evaluates to `true`. 
+ + If the number of arguments is odd, the last argument is the default value which + is returned when no condition matches. If the number of arguments is even, and + no condition matches, the function returns `null`.""", + examples = { + @Example(description = "Determine whether employees are monolingual, bilingual, or polyglot:", file = "docs", tag = "case"), + @Example( + description = "Calculate the total connection success rate based on log messages:", + file = "conditional", + tag = "docsCaseSuccessRate" + ), + @Example( + description = "Calculate an hourly error rate as a percentage of the total number of log messages:", + file = "conditional", + tag = "docsCaseHourlyErrorRate" + ) } ) public Case( Source source, - @Param(name = "condition", type = { "boolean" }) Expression first, + @Param(name = "condition", type = { "boolean" }, description = "A condition.") Expression first, @Param( name = "trueValue", type = { @@ -79,7 +96,9 @@ public Case( "long", "text", "unsigned_long", - "version" } + "version" }, + description = "The value that's returned when the corresponding condition is the first to evaluate to `true`. " + + "The default value is returned when no condition matches." 
) List rest ) { super(source, Stream.concat(Stream.of(first), rest.stream()).toList()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java index 1794258402aed..b1c761a50d8be 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; @@ -37,14 +38,26 @@ public class Greatest extends EsqlScalarFunction implements OptionalArgument { private DataType dataType; @FunctionInfo( - returnType = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }, - description = "Returns the maximum value from many columns." + returnType = { "boolean", "double", "integer", "ip", "keyword", "long", "text", "version" }, + description = "Returns the maximum value from multiple columns. This is similar to <>\n" + + "except it is intended to run on multiple columns at once.", + note = "When run on `keyword` or `text` fields, this returns the last string in alphabetical order. 
" + + "When run on `boolean` columns this will return `true` if any values are `true`.", + examples = @Example(file = "math", tag = "greatest") ) public Greatest( Source source, - @Param(name = "first", type = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }) Expression first, - @Param(name = "rest", type = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }, optional = true) List< - Expression> rest + @Param( + name = "first", + type = { "boolean", "double", "integer", "ip", "keyword", "long", "text", "version" }, + description = "First of the columns to evaluate." + ) Expression first, + @Param( + name = "rest", + type = { "boolean", "double", "integer", "ip", "keyword", "long", "text", "version" }, + description = "The rest of the columns to evaluate.", + optional = true + ) List rest ) { super(source, Stream.concat(Stream.of(first), rest.stream()).toList()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java index 6b4208f7b3d85..8b68196af68a5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; @@ -37,14 +38,24 @@ public class Least extends 
EsqlScalarFunction implements OptionalArgument { private DataType dataType; @FunctionInfo( - returnType = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }, - description = "Returns the minimum value from many columns." + returnType = { "boolean", "double", "integer", "ip", "keyword", "long", "text", "version" }, + description = "Returns the minimum value from multiple columns. " + + "This is similar to <> except it is intended to run on multiple columns at once.", + examples = @Example(file = "math", tag = "least") ) public Least( Source source, - @Param(name = "first", type = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }) Expression first, - @Param(name = "rest", type = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }, optional = true) List< - Expression> rest + @Param( + name = "first", + type = { "boolean", "double", "integer", "ip", "keyword", "long", "text", "version" }, + description = "First of the columns to evaluate." 
+ ) Expression first, + @Param( + name = "rest", + type = { "boolean", "double", "integer", "ip", "keyword", "long", "text", "version" }, + description = "The rest of the columns to evaluate.", + optional = true + ) List rest ) { super(source, Stream.concat(Stream.of(first), rest.stream()).toList()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java index 6dc859afe37e3..55dff823806d2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java @@ -145,7 +145,13 @@ public static Part resolve(String dateTimeUnit) { | millisecond | milliseconds, ms | microsecond | microseconds, mcs | nanosecond | nanoseconds, ns - |===""", examples = @Example(file = "date", tag = "docsDateDiff")) + |=== + + Note that while there is an overlap between the function's supported units and + {esql}'s supported time span literals, these sets are distinct and not + interchangeable. 
Similarly, the supported abbreviations are conveniently shared + with implementations of this function in other established products and not + necessarily common with the date-time nomenclature used by {es}.""", examples = @Example(file = "date", tag = "docsDateDiff")) public DateDiff( Source source, @Param(name = "unit", type = { "keyword", "text" }, description = "Time difference unit") Expression unit, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java index 98dc0c7e83d93..8c39a29f67f95 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java @@ -52,12 +52,12 @@ public Coalesce( @Param( name = "first", type = { "boolean", "text", "integer", "keyword", "long" }, - description = "Expression to evaluate" + description = "Expression to evaluate." 
) Expression first, @Param( name = "rest", type = { "boolean", "text", "integer", "keyword", "long" }, - description = "Other expression to evaluate", + description = "Other expression to evaluate.", optional = true ) List rest ) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java index 279f31e34ac95..31e0a86a1e3ec 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java @@ -111,7 +111,9 @@ private boolean pointRelatesGeometries(long encoded, Component2D[] rightComponen @FunctionInfo( returnType = { "boolean" }, - description = "Returns whether the first geometry contains the second geometry.", + description = """ + Returns whether the first geometry contains the second geometry. + This is the inverse of the <> function.""", examples = @Example(file = "spatial_shapes", tag = "st_contains-airport_city_boundaries") ) public SpatialContains( @@ -119,12 +121,16 @@ public SpatialContains( @Param( name = "geomA", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, - description = "Geometry column name or variable of geometry type" + description = "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. " + + "If `null`, the function returns `null`." ) Expression left, @Param( name = "geomB", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, - description = "Geometry column name or variable of geometry type" + description = "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. 
" + + "If `null`, the function returns `null`.\n" + + "The second parameter must also have the same coordinate system as the first.\n" + + "This means it is not possible to combine `geo_*` and `cartesian_*` parameters." ) Expression right ) { this(source, left, right, false, false); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java index 7833f93b6270f..7b85ebfea5ee2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java @@ -65,7 +65,10 @@ public class SpatialDisjoint extends SpatialRelatesFunction { @FunctionInfo( returnType = { "boolean" }, - description = "Returns whether the two geometries or geometry columns are disjoint.", + description = """ + Returns whether the two geometries or geometry columns are disjoint. + This is the inverse of the <> function. + In mathematical terms: ST_Disjoint(A, B) ⇔ A ⋂ B = ∅""", examples = @Example(file = "spatial_shapes", tag = "st_disjoint-airport_city_boundaries") ) public SpatialDisjoint( @@ -73,12 +76,16 @@ public SpatialDisjoint( @Param( name = "geomA", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, - description = "Geometry column name or variable of geometry type" + description = "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. " + + "If `null`, the function returns `null`." ) Expression left, @Param( name = "geomB", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, - description = "Geometry column name or variable of geometry type" + description = "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. 
" + + "If `null`, the function returns `null`.\n" + + "The second parameter must also have the same coordinate system as the first.\n" + + "This means it is not possible to combine `geo_*` and `cartesian_*` parameters." ) Expression right ) { this(source, left, right, false, false); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java index 810e3206ada73..462f3bce1aeea 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java @@ -63,22 +63,27 @@ public class SpatialIntersects extends SpatialRelatesFunction { new CartesianShapeIndexer("ST_Intersects") ); - @FunctionInfo( - returnType = { "boolean" }, - description = "Returns whether the two geometries or geometry columns intersect.", - examples = @Example(file = "spatial", tag = "st_intersects-airports") - ) + @FunctionInfo(returnType = { "boolean" }, description = """ + Returns true if two geometries intersect. + They intersect if they have any point in common, including their interior points + (points along lines or within polygons). + This is the inverse of the <> function. + In mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅""", examples = @Example(file = "spatial", tag = "st_intersects-airports")) public SpatialIntersects( Source source, @Param( name = "geomA", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, - description = "Geometry column name or variable of geometry type" + description = "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. " + + "If `null`, the function returns `null`." 
) Expression left, @Param( name = "geomB", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, - description = "Geometry column name or variable of geometry type" + description = "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. " + + "If `null`, the function returns `null`.\n" + + "The second parameter must also have the same coordinate system as the first.\n" + + "This means it is not possible to combine `geo_*` and `cartesian_*` parameters." ) Expression right ) { this(source, left, right, false, false); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java index ca285ca07e27b..1eaf1e31e5430 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java @@ -66,7 +66,9 @@ public class SpatialWithin extends SpatialRelatesFunction implements SurrogateEx @FunctionInfo( returnType = { "boolean" }, - description = "Returns whether the first geometry is within the second geometry.", + description = """ + Returns whether the first geometry is within the second geometry. + This is the inverse of the <> function.""", examples = @Example(file = "spatial_shapes", tag = "st_within-airport_city_boundaries") ) public SpatialWithin( @@ -74,12 +76,16 @@ public SpatialWithin( @Param( name = "geomA", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, - description = "Geometry column name or variable of geometry type" + description = "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. " + + "If `null`, the function returns `null`." 
) Expression left, @Param( name = "geomB", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, - description = "Geometry column name or variable of geometry type" + description = "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. " + + "If `null`, the function returns `null`.\n" + + "The second parameter must also have the same coordinate system as the first.\n" + + "This means it is not possible to combine `geo_*` and `cartesian_*` parameters." ) Expression right ) { this(source, left, right, false, false); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StX.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StX.java index f86be9290fed1..f5ff933babc9a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StX.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StX.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.ConvertEvaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; @@ -34,8 +35,20 @@ * Alternatively it is well described in PostGIS documentation at PostGIS:ST_X. 
*/ public class StX extends UnaryScalarFunction { - @FunctionInfo(returnType = "double", description = "Extracts the x-coordinate from a point geometry.") - public StX(Source source, @Param(name = "point", type = { "geo_point", "cartesian_point" }) Expression field) { + @FunctionInfo( + returnType = "double", + description = "Extracts the `x` coordinate from the supplied point.\n" + + "If the points is of type `geo_point` this is equivalent to extracting the `longitude` value.", + examples = @Example(file = "spatial", tag = "st_x_y") + ) + public StX( + Source source, + @Param( + name = "point", + type = { "geo_point", "cartesian_point" }, + description = "Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`." + ) Expression field + ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StY.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StY.java index 759c23c73374a..48de97da4befb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StY.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StY.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.ConvertEvaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; @@ -34,8 +35,20 @@ * Alternatively it is well described in PostGIS documentation at PostGIS:ST_Y. 
*/ public class StY extends UnaryScalarFunction { - @FunctionInfo(returnType = "double", description = "Extracts the y-coordinate from a point geometry.") - public StY(Source source, @Param(name = "point", type = { "geo_point", "cartesian_point" }) Expression field) { + @FunctionInfo( + returnType = "double", + description = "Extracts the `y` coordinate from the supplied point.\n" + + "If the points is of type `geo_point` this is equivalent to extracting the `latitude` value.", + examples = @Example(file = "spatial", tag = "st_x_y") + ) + public StY( + Source source, + @Param( + name = "point", + type = { "geo_point", "cartesian_point" }, + description = "Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`." + ) Expression field + ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index 0cfffb128e0cf..4a5748f26a07f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -540,7 +540,7 @@ static EnrichExec readEnrichExec(PlanStreamInput in) throws IOException { final String policyMatchField = in.readString(); final Map concreteIndices; final Enrich.Mode mode; - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_MULTI_CLUSTERS_ENRICH)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { mode = in.readEnum(Enrich.Mode.class); concreteIndices = in.readMap(StreamInput::readString, StreamInput::readString); } else { @@ -573,7 +573,7 @@ static void writeEnrichExec(PlanStreamOutput out, EnrichExec enrich) throws IOEx out.writeString(enrich.matchType()); } out.writeString(enrich.policyMatchField()); - if 
(out.getTransportVersion().onOrAfter(TransportVersions.ESQL_MULTI_CLUSTERS_ENRICH)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeEnum(enrich.mode()); out.writeMap(enrich.concreteIndices(), StreamOutput::writeString, StreamOutput::writeString); } else { @@ -824,19 +824,19 @@ static void writeEval(PlanStreamOutput out, Eval eval) throws IOException { static Enrich readEnrich(PlanStreamInput in) throws IOException { Enrich.Mode mode = Enrich.Mode.ANY; - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_POLICY_CCQ_MODE)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { mode = in.readEnum(Enrich.Mode.class); } final Source source = in.readSource(); final LogicalPlan child = in.readLogicalPlanNode(); final Expression policyName = in.readExpression(); final NamedExpression matchField = in.readNamedExpression(); - if (in.getTransportVersion().before(TransportVersions.ESQL_MULTI_CLUSTERS_ENRICH)) { + if (in.getTransportVersion().before(TransportVersions.V_8_13_0)) { in.readString(); // discard the old policy name } final EnrichPolicy policy = new EnrichPolicy(in); final Map concreteIndices; - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_MULTI_CLUSTERS_ENRICH)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { concreteIndices = in.readMap(StreamInput::readString, StreamInput::readString); } else { EsIndex esIndex = readEsIndex(in); @@ -849,7 +849,7 @@ static Enrich readEnrich(PlanStreamInput in) throws IOException { } static void writeEnrich(PlanStreamOutput out, Enrich enrich) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_POLICY_CCQ_MODE)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeEnum(enrich.mode()); } @@ -857,11 +857,11 @@ static void writeEnrich(PlanStreamOutput out, Enrich enrich) throws IOException out.writeLogicalPlanNode(enrich.child()); 
out.writeExpression(enrich.policyName()); out.writeNamedExpression(enrich.matchField()); - if (out.getTransportVersion().before(TransportVersions.ESQL_MULTI_CLUSTERS_ENRICH)) { + if (out.getTransportVersion().before(TransportVersions.V_8_13_0)) { out.writeString(BytesRefs.toString(enrich.policyName().fold())); // old policy name } enrich.policy().writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_MULTI_CLUSTERS_ENRICH)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeMap(enrich.concreteIndices(), StreamOutput::writeString, StreamOutput::writeString); } else { Map concreteIndices = enrich.concreteIndices(); @@ -1819,8 +1819,8 @@ static void writeLiteral(PlanStreamOutput out, Literal literal) throws IOExcepti */ private static Object mapFromLiteralValue(PlanStreamOutput out, DataType dataType, Object value) { if (dataType == GEO_POINT || dataType == CARTESIAN_POINT) { - // In 8.12.0 and earlier builds of 8.13 (pre-release) we serialized point literals as encoded longs, but now use WKB - if (out.getTransportVersion().before(TransportVersions.ESQL_PLAN_POINT_LITERAL_WKB)) { + // In 8.12.0 we serialized point literals as encoded longs, but now use WKB + if (out.getTransportVersion().before(TransportVersions.V_8_13_0)) { if (value instanceof List list) { return list.stream().map(v -> mapFromLiteralValue(out, dataType, v)).toList(); } @@ -1836,8 +1836,8 @@ private static Object mapFromLiteralValue(PlanStreamOutput out, DataType dataTyp */ private static Object mapToLiteralValue(PlanStreamInput in, DataType dataType, Object value) { if (dataType == GEO_POINT || dataType == CARTESIAN_POINT) { - // In 8.12.0 and earlier builds of 8.13 (pre-release) we serialized point literals as encoded longs, but now use WKB - if (in.getTransportVersion().before(TransportVersions.ESQL_PLAN_POINT_LITERAL_WKB)) { + // In 8.12.0 we serialized point literals as encoded longs, but now use WKB + if 
(in.getTransportVersion().before(TransportVersions.V_8_13_0)) { if (value instanceof List list) { return list.stream().map(v -> mapToLiteralValue(in, dataType, v)).toList(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java index 046e46d216bdc..93bd2518ae380 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java @@ -7,10 +7,23 @@ package org.elasticsearch.xpack.esql.io.stream; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockStreamInput; +import org.elasticsearch.compute.data.BlockUtils; +import org.elasticsearch.compute.data.BooleanBigArrayBlock; +import org.elasticsearch.compute.data.DoubleBigArrayBlock; +import org.elasticsearch.compute.data.IntBigArrayBlock; +import org.elasticsearch.compute.data.LongBigArrayBlock; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.Column; import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanNamedReader; import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanReader; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; @@ -60,6 +73,8 @@ public NameId apply(long streamNameId) { private static final Supplier> DEFAULT_NAME_ID_FUNC = NameIdMapper::new; + private final Map 
cachedBlocks = new HashMap<>(); + private final PlanNameRegistry registry; // hook for nameId, where can cache and map, for now just return a NameId of the same long value. @@ -180,6 +195,79 @@ public EsqlConfiguration configuration() throws IOException { return configuration; } + /** + * Read a {@link Block} as part of the plan. + *

+ * These {@link Block}s are not tracked by {@link BlockFactory} and closing them + * does nothing so they should be small. We do make sure not to send duplicates, + * reusing blocks sent as part of the {@link EsqlConfiguration#tables()} if + * possible, otherwise sending a {@linkplain Block} inline. + *

+ */ + public Block readCachedBlock() throws IOException { + byte key = readByte(); + Block block = switch (key) { + case PlanStreamOutput.NEW_BLOCK_KEY -> { + int id = readVInt(); + // TODO track blocks read over the wire.... Or slice them from BigArrays? Something. + Block b = new BlockStreamInput( + this, + new BlockFactory(new NoopCircuitBreaker(CircuitBreaker.REQUEST), BigArrays.NON_RECYCLING_INSTANCE) + ).readNamedWriteable(Block.class); + cachedBlocks.put(id, b); + yield b; + } + case PlanStreamOutput.FROM_PREVIOUS_KEY -> cachedBlocks.get(readVInt()); + case PlanStreamOutput.FROM_CONFIG_KEY -> { + String tableName = readString(); + Map table = configuration.tables().get(tableName); + if (table == null) { + throw new IOException("can't find table [" + tableName + "]"); + } + String columnName = readString(); + Column column = table.get(columnName); + if (column == null) { + throw new IOException("can't find column[" + columnName + "]"); + } + yield column.values(); + } + default -> throw new IOException("invalid encoding for Block"); + }; + assert block instanceof LongBigArrayBlock == false : "BigArrays not supported because we don't close"; + assert block instanceof IntBigArrayBlock == false : "BigArrays not supported because we don't close"; + assert block instanceof DoubleBigArrayBlock == false : "BigArrays not supported because we don't close"; + assert block instanceof BooleanBigArrayBlock == false : "BigArrays not supported because we don't close"; + return block; + } + + /** + * Read an array of {@link Block}s as part of the plan. + *

+ * These {@link Block}s are not tracked by {@link BlockFactory} and closing them + * does nothing so they should be small. We do make sure not to send duplicates, + * reusing blocks sent as part of the {@link EsqlConfiguration#tables()} if + * possible, otherwise sending a {@linkplain Block} inline. + *

+ */ + public Block[] readCachedBlockArray() throws IOException { + int len = readArraySize(); + if (len == 0) { + return BlockUtils.NO_BLOCKS; + } + Block[] blocks = new Block[len]; + try { + for (int i = 0; i < blocks.length; i++) { + blocks[i] = readCachedBlock(); + } + return blocks; + } finally { + if (blocks[blocks.length - 1] == null) { + // Wasn't successful reading all blocks + Releasables.closeExpectNoException(blocks); + } + } + } + static void throwOnNullOptionalRead(Class type) throws IOException { final IOException e = new IOException("read optional named returned null which is not allowed, type:" + type); assert false : e; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java index 5ee292b6add9e..d78e004aade31 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java @@ -8,9 +8,20 @@ package org.elasticsearch.xpack.esql.io.stream; import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BooleanBigArrayBlock; +import org.elasticsearch.compute.data.DoubleBigArrayBlock; +import org.elasticsearch.compute.data.IntBigArrayBlock; +import org.elasticsearch.compute.data.LongBigArrayBlock; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.xpack.esql.Column; import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanWriter; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; +import org.elasticsearch.xpack.esql.session.EsqlConfiguration; 
import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.NamedExpression; @@ -18,6 +29,8 @@ import org.elasticsearch.xpack.ql.tree.Source; import java.io.IOException; +import java.util.IdentityHashMap; +import java.util.Map; import java.util.function.Function; import static org.elasticsearch.xpack.ql.util.SourceUtils.writeSourceNoText; @@ -28,19 +41,42 @@ */ public final class PlanStreamOutput extends StreamOutput { + /** + * Cache of written blocks. We use an {@link IdentityHashMap} for this + * because calculating the {@link Object#hashCode} of a {@link Block} + * is slow. And so is {@link Object#equals}. So, instead we just use + * object identity. + */ + private final Map cachedBlocks = new IdentityHashMap<>(); + private final StreamOutput delegate; private final PlanNameRegistry registry; private final Function, String> nameSupplier; - public PlanStreamOutput(StreamOutput delegate, PlanNameRegistry registry) { - this(delegate, registry, PlanNamedTypes::name); + private int nextCachedBlock = 0; + + public PlanStreamOutput(StreamOutput delegate, PlanNameRegistry registry, @Nullable EsqlConfiguration configuration) + throws IOException { + this(delegate, registry, configuration, PlanNamedTypes::name); } - public PlanStreamOutput(StreamOutput delegate, PlanNameRegistry registry, Function, String> nameSupplier) { + public PlanStreamOutput( + StreamOutput delegate, + PlanNameRegistry registry, + @Nullable EsqlConfiguration configuration, + Function, String> nameSupplier + ) throws IOException { this.delegate = delegate; this.registry = registry; this.nameSupplier = nameSupplier; + if (configuration != null) { + for (Map.Entry> table : configuration.tables().entrySet()) { + for (Map.Entry column : table.getValue().entrySet()) { + cachedBlocks.put(column.getValue().values(), fromConfigKey(table.getKey(), column.getKey())); + } + } + } } public void 
writeLogicalPlanNode(LogicalPlan logicalPlan) throws IOException { @@ -130,4 +166,86 @@ public void setTransportVersion(TransportVersion version) { delegate.setTransportVersion(version); super.setTransportVersion(version); } + + /** + * Write a {@link Block} as part of the plan. + *

+ * These {@link Block}s are not tracked by {@link BlockFactory} and closing them + * does nothing so they should be small. We do make sure not to send duplicates, + * reusing blocks sent as part of the {@link EsqlConfiguration#tables()} if + * possible, otherwise sending a {@linkplain Block} inline. + *

+ */ + public void writeCachedBlock(Block block) throws IOException { + assert block instanceof LongBigArrayBlock == false : "BigArrays not supported because we don't close"; + assert block instanceof IntBigArrayBlock == false : "BigArrays not supported because we don't close"; + assert block instanceof DoubleBigArrayBlock == false : "BigArrays not supported because we don't close"; + assert block instanceof BooleanBigArrayBlock == false : "BigArrays not supported because we don't close"; + BytesReference key = cachedBlocks.get(block); + if (key != null) { + key.writeTo(this); + return; + } + writeByte(NEW_BLOCK_KEY); + writeVInt(nextCachedBlock); + cachedBlocks.put(block, fromPreviousKey(nextCachedBlock)); + writeNamedWriteable(block); + nextCachedBlock++; + } + + /** + * The byte representing a {@link Block} sent for the first time. The byte + * will be followed by a {@link StreamOutput#writeVInt} encoded identifier + * and then the contents of the {@linkplain Block} will immediately follow + * this byte. + */ + static final byte NEW_BLOCK_KEY = 0; + + /** + * The byte representing a {@link Block} that has previously been sent. + * This byte will be followed up a {@link StreamOutput#writeVInt} encoded + * identifier pointing to the block to read. + */ + static final byte FROM_PREVIOUS_KEY = 1; + + /** + * The byte representing a {@link Block} that was part of the + * {@link EsqlConfiguration#tables()} map. It is followed a string for + * the table name and then a string for the column name. + */ + static final byte FROM_CONFIG_KEY = 2; + + /** + * Build the key for reading a {@link Block} from the cache of previously + * received {@linkplain Block}s. + */ + static BytesReference fromPreviousKey(int id) throws IOException { + try (BytesStreamOutput key = new BytesStreamOutput()) { + key.writeByte(FROM_PREVIOUS_KEY); + key.writeVInt(id); + return key.bytes(); + } + } + + /** + * Build the key for reading a {@link Block} from the {@link EsqlConfiguration}. 
+ * This is important because some operations like {@code LOOKUP} frequently read + * {@linkplain Block}s directly from the configuration. + *

+ * It'd be possible to implement this by adding all of the Blocks as "previous" + * keys in the constructor and never use this construct at all, but that'd + * require there be a consistent ordering of Blocks there. We could make one, + * but I'm afraid that'd be brittle as we evolve the code. It'd make wire + * compatibility difficult. This signal is much simpler to deal with even though + * it is more bytes over the wire. + *

+ */ + static BytesReference fromConfigKey(String table, String column) throws IOException { + try (BytesStreamOutput key = new BytesStreamOutput()) { + key.writeByte(FROM_CONFIG_KEY); + key.writeString(table); + key.writeString(column); + return key.bytes(); + } + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp index 866093ef55a6c..899f745e50c3a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp @@ -11,6 +11,7 @@ null 'keep' 'limit' 'meta' +'metrics' 'mv_expand' 'rename' 'row' @@ -25,6 +26,7 @@ null null null null +null '|' null null @@ -80,7 +82,6 @@ null null null null -null 'as' null null @@ -110,6 +111,12 @@ null null null null +null +null +null +null +null +null token symbolic names: null @@ -124,6 +131,7 @@ INLINESTATS KEEP LIMIT META +METRICS MV_EXPAND RENAME ROW @@ -135,6 +143,7 @@ UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT WS +INDEX_UNQUOTED_IDENTIFIER EXPLAIN_WS EXPLAIN_LINE_COMMENT EXPLAIN_MULTILINE_COMMENT @@ -186,7 +195,6 @@ EXPR_MULTILINE_COMMENT EXPR_WS OPTIONS METADATA -FROM_UNQUOTED_IDENTIFIER FROM_LINE_COMMENT FROM_MULTILINE_COMMENT FROM_WS @@ -223,6 +231,12 @@ SETTING SETTING_LINE_COMMENT SETTTING_MULTILINE_COMMENT SETTING_WS +METRICS_LINE_COMMENT +METRICS_MULTILINE_COMMENT +METRICS_WS +CLOSING_METRICS_LINE_COMMENT +CLOSING_METRICS_MULTILINE_COMMENT +CLOSING_METRICS_WS rule names: DISSECT @@ -236,6 +250,7 @@ INLINESTATS KEEP LIMIT META +METRICS MV_EXPAND RENAME ROW @@ -247,6 +262,8 @@ UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT WS +INDEX_UNQUOTED_IDENTIFIER_PART +INDEX_UNQUOTED_IDENTIFIER EXPLAIN_OPENING_BRACKET EXPLAIN_PIPE EXPLAIN_WS @@ -317,9 +334,7 @@ FROM_ASSIGN FROM_QUOTED_STRING OPTIONS METADATA -FROM_UNQUOTED_IDENTIFIER_PART 
-FROM_UNQUOTED_IDENTIFIER -FROM_QUOTED_IDENTIFIER +FROM_INDEX_UNQUOTED_IDENTIFIER FROM_LINE_COMMENT FROM_MULTILINE_COMMENT FROM_WS @@ -385,6 +400,19 @@ SETTING SETTING_LINE_COMMENT SETTTING_MULTILINE_COMMENT SETTING_WS +METRICS_PIPE +METRICS_INDEX_UNQUOTED_IDENTIFIER +METRICS_LINE_COMMENT +METRICS_MULTILINE_COMMENT +METRICS_WS +CLOSING_METRICS_COMMA +CLOSING_METRICS_LINE_COMMENT +CLOSING_METRICS_MULTILINE_COMMENT +CLOSING_METRICS_WS +CLOSING_METRICS_QUOTED_IDENTIFIER +CLOSING_METRICS_UNQUOTED_IDENTIFIER +CLOSING_METRICS_BY +CLOSING_METRICS_PIPE channel names: DEFAULT_TOKEN_CHANNEL @@ -403,6 +431,8 @@ MVEXPAND_MODE SHOW_MODE META_MODE SETTING_MODE +METRICS_MODE +CLOSING_METRICS_MODE atn: -[4, 0, 110, 1203, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 
87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 
1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 4, 18, 484, 8, 18, 11, 18, 12, 18, 485, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 494, 8, 19, 10, 19, 12, 19, 497, 9, 19, 1, 19, 3, 19, 500, 8, 19, 1, 19, 3, 19, 503, 8, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 512, 8, 20, 10, 20, 12, 20, 515, 9, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 4, 21, 523, 8, 21, 11, 21, 12, 21, 524, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 1, 32, 3, 32, 566, 8, 32, 1, 32, 4, 32, 569, 8, 32, 11, 32, 12, 32, 570, 1, 33, 1, 33, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 3, 35, 580, 8, 35, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 3, 37, 587, 8, 37, 1, 38, 1, 38, 1, 38, 5, 38, 592, 8, 38, 10, 38, 12, 38, 595, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 603, 8, 38, 10, 38, 12, 38, 606, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 3, 38, 613, 8, 38, 1, 38, 3, 38, 616, 8, 38, 3, 38, 618, 8, 38, 1, 39, 4, 39, 621, 8, 39, 11, 39, 12, 39, 622, 1, 40, 4, 40, 626, 8, 40, 11, 40, 12, 40, 627, 1, 40, 1, 40, 5, 40, 632, 8, 40, 10, 40, 12, 40, 635, 9, 40, 1, 40, 1, 40, 4, 40, 639, 8, 40, 11, 40, 12, 40, 640, 1, 40, 4, 40, 644, 8, 40, 11, 40, 12, 40, 645, 1, 40, 1, 40, 5, 40, 650, 8, 40, 10, 40, 12, 40, 653, 9, 40, 3, 40, 655, 8, 40, 1, 40, 1, 40, 1, 40, 1, 40, 4, 40, 661, 8, 40, 11, 40, 12, 40, 662, 1, 40, 1, 40, 3, 40, 667, 8, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 
55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 77, 1, 77, 1, 77, 1, 77, 1, 77, 1, 78, 1, 78, 5, 78, 798, 8, 78, 10, 78, 12, 78, 801, 9, 78, 1, 78, 1, 78, 3, 78, 805, 8, 78, 1, 78, 4, 78, 808, 8, 78, 11, 78, 12, 78, 809, 3, 78, 812, 8, 78, 1, 79, 1, 79, 4, 79, 816, 8, 79, 11, 79, 12, 79, 817, 1, 79, 1, 79, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 3, 92, 881, 8, 92, 1, 93, 4, 93, 884, 8, 93, 11, 93, 12, 93, 885, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 3, 101, 921, 8, 101, 1, 102, 1, 102, 3, 102, 925, 8, 102, 1, 102, 5, 102, 928, 8, 102, 10, 102, 12, 102, 931, 9, 102, 1, 102, 1, 102, 3, 102, 935, 8, 102, 1, 102, 4, 102, 938, 8, 102, 11, 102, 12, 102, 939, 3, 102, 942, 8, 102, 1, 103, 1, 103, 4, 103, 946, 8, 103, 11, 103, 12, 103, 947, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 
111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 121, 4, 121, 1023, 8, 121, 11, 121, 12, 121, 1024, 1, 121, 1, 121, 3, 121, 1029, 8, 121, 1, 121, 4, 121, 1032, 8, 121, 11, 121, 12, 121, 1033, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 4, 156, 1188, 8, 156, 11, 156, 12, 156, 1189, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 2, 513, 604, 0, 160, 12, 1, 14, 2, 16, 3, 18, 4, 20, 5, 22, 6, 24, 7, 26, 8, 28, 9, 30, 10, 32, 11, 34, 12, 36, 13, 38, 14, 40, 15, 42, 16, 44, 17, 46, 18, 48, 19, 50, 
20, 52, 21, 54, 22, 56, 0, 58, 0, 60, 23, 62, 24, 64, 25, 66, 26, 68, 0, 70, 0, 72, 0, 74, 0, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 27, 90, 28, 92, 29, 94, 30, 96, 31, 98, 32, 100, 33, 102, 34, 104, 35, 106, 36, 108, 37, 110, 38, 112, 39, 114, 40, 116, 41, 118, 42, 120, 43, 122, 44, 124, 45, 126, 46, 128, 47, 130, 48, 132, 49, 134, 50, 136, 51, 138, 52, 140, 53, 142, 54, 144, 55, 146, 56, 148, 57, 150, 58, 152, 59, 154, 60, 156, 61, 158, 62, 160, 63, 162, 64, 164, 65, 166, 66, 168, 67, 170, 0, 172, 68, 174, 69, 176, 70, 178, 71, 180, 0, 182, 0, 184, 0, 186, 0, 188, 0, 190, 0, 192, 72, 194, 73, 196, 0, 198, 74, 200, 0, 202, 75, 204, 76, 206, 77, 208, 0, 210, 0, 212, 0, 214, 0, 216, 0, 218, 78, 220, 79, 222, 80, 224, 81, 226, 0, 228, 0, 230, 0, 232, 0, 234, 82, 236, 0, 238, 83, 240, 84, 242, 85, 244, 0, 246, 0, 248, 86, 250, 87, 252, 0, 254, 88, 256, 0, 258, 0, 260, 89, 262, 90, 264, 91, 266, 0, 268, 0, 270, 0, 272, 0, 274, 0, 276, 0, 278, 0, 280, 92, 282, 93, 284, 94, 286, 0, 288, 0, 290, 0, 292, 0, 294, 95, 296, 96, 298, 97, 300, 0, 302, 98, 304, 99, 306, 100, 308, 101, 310, 0, 312, 102, 314, 103, 316, 104, 318, 105, 320, 0, 322, 106, 324, 107, 326, 108, 328, 109, 330, 110, 12, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 5, 0, 34, 34, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 10, 0, 9, 10, 13, 13, 32, 32, 44, 44, 47, 47, 61, 61, 91, 91, 93, 93, 96, 96, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1230, 0, 12, 1, 0, 0, 0, 0, 14, 1, 0, 0, 0, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 
38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 1, 56, 1, 0, 0, 0, 1, 58, 1, 0, 0, 0, 1, 60, 1, 0, 0, 0, 1, 62, 1, 0, 0, 0, 1, 64, 1, 0, 0, 0, 2, 66, 1, 0, 0, 0, 2, 88, 1, 0, 0, 0, 2, 90, 1, 0, 0, 0, 2, 92, 1, 0, 0, 0, 2, 94, 1, 0, 0, 0, 2, 96, 1, 0, 0, 0, 2, 98, 1, 0, 0, 0, 2, 100, 1, 0, 0, 0, 2, 102, 1, 0, 0, 0, 2, 104, 1, 0, 0, 0, 2, 106, 1, 0, 0, 0, 2, 108, 1, 0, 0, 0, 2, 110, 1, 0, 0, 0, 2, 112, 1, 0, 0, 0, 2, 114, 1, 0, 0, 0, 2, 116, 1, 0, 0, 0, 2, 118, 1, 0, 0, 0, 2, 120, 1, 0, 0, 0, 2, 122, 1, 0, 0, 0, 2, 124, 1, 0, 0, 0, 2, 126, 1, 0, 0, 0, 2, 128, 1, 0, 0, 0, 2, 130, 1, 0, 0, 0, 2, 132, 1, 0, 0, 0, 2, 134, 1, 0, 0, 0, 2, 136, 1, 0, 0, 0, 2, 138, 1, 0, 0, 0, 2, 140, 1, 0, 0, 0, 2, 142, 1, 0, 0, 0, 2, 144, 1, 0, 0, 0, 2, 146, 1, 0, 0, 0, 2, 148, 1, 0, 0, 0, 2, 150, 1, 0, 0, 0, 2, 152, 1, 0, 0, 0, 2, 154, 1, 0, 0, 0, 2, 156, 1, 0, 0, 0, 2, 158, 1, 0, 0, 0, 2, 160, 1, 0, 0, 0, 2, 162, 1, 0, 0, 0, 2, 164, 1, 0, 0, 0, 2, 166, 1, 0, 0, 0, 2, 168, 1, 0, 0, 0, 2, 172, 1, 0, 0, 0, 2, 174, 1, 0, 0, 0, 2, 176, 1, 0, 0, 0, 2, 178, 1, 0, 0, 0, 3, 180, 1, 0, 0, 0, 3, 182, 1, 0, 0, 0, 3, 184, 1, 0, 0, 0, 3, 186, 1, 0, 0, 0, 3, 188, 1, 0, 0, 0, 3, 190, 1, 0, 0, 0, 3, 192, 1, 0, 0, 0, 3, 194, 1, 0, 0, 0, 3, 198, 1, 0, 0, 0, 3, 200, 1, 0, 0, 0, 3, 202, 1, 0, 0, 0, 3, 204, 1, 0, 0, 0, 3, 206, 1, 0, 0, 0, 4, 208, 1, 0, 0, 0, 4, 210, 1, 0, 0, 0, 4, 212, 1, 0, 0, 0, 4, 218, 1, 0, 0, 0, 4, 220, 1, 0, 0, 0, 4, 222, 1, 0, 0, 0, 4, 224, 1, 0, 0, 0, 5, 226, 1, 0, 0, 0, 5, 228, 1, 0, 0, 0, 5, 230, 1, 0, 0, 0, 5, 232, 1, 0, 0, 0, 5, 234, 1, 0, 0, 0, 5, 236, 1, 0, 0, 0, 5, 238, 1, 0, 0, 0, 5, 240, 1, 0, 0, 0, 5, 242, 1, 0, 0, 0, 6, 244, 1, 0, 0, 0, 6, 246, 1, 0, 0, 0, 6, 248, 1, 0, 0, 0, 6, 250, 1, 0, 0, 0, 6, 254, 1, 0, 0, 0, 6, 256, 1, 0, 0, 0, 6, 258, 1, 0, 0, 0, 6, 260, 1, 0, 0, 0, 6, 262, 1, 0, 0, 0, 6, 264, 1, 0, 0, 0, 7, 266, 1, 0, 0, 0, 7, 268, 1, 0, 0, 0, 7, 
270, 1, 0, 0, 0, 7, 272, 1, 0, 0, 0, 7, 274, 1, 0, 0, 0, 7, 276, 1, 0, 0, 0, 7, 278, 1, 0, 0, 0, 7, 280, 1, 0, 0, 0, 7, 282, 1, 0, 0, 0, 7, 284, 1, 0, 0, 0, 8, 286, 1, 0, 0, 0, 8, 288, 1, 0, 0, 0, 8, 290, 1, 0, 0, 0, 8, 292, 1, 0, 0, 0, 8, 294, 1, 0, 0, 0, 8, 296, 1, 0, 0, 0, 8, 298, 1, 0, 0, 0, 9, 300, 1, 0, 0, 0, 9, 302, 1, 0, 0, 0, 9, 304, 1, 0, 0, 0, 9, 306, 1, 0, 0, 0, 9, 308, 1, 0, 0, 0, 10, 310, 1, 0, 0, 0, 10, 312, 1, 0, 0, 0, 10, 314, 1, 0, 0, 0, 10, 316, 1, 0, 0, 0, 10, 318, 1, 0, 0, 0, 11, 320, 1, 0, 0, 0, 11, 322, 1, 0, 0, 0, 11, 324, 1, 0, 0, 0, 11, 326, 1, 0, 0, 0, 11, 328, 1, 0, 0, 0, 11, 330, 1, 0, 0, 0, 12, 332, 1, 0, 0, 0, 14, 342, 1, 0, 0, 0, 16, 349, 1, 0, 0, 0, 18, 358, 1, 0, 0, 0, 20, 365, 1, 0, 0, 0, 22, 375, 1, 0, 0, 0, 24, 382, 1, 0, 0, 0, 26, 389, 1, 0, 0, 0, 28, 403, 1, 0, 0, 0, 30, 410, 1, 0, 0, 0, 32, 418, 1, 0, 0, 0, 34, 425, 1, 0, 0, 0, 36, 437, 1, 0, 0, 0, 38, 446, 1, 0, 0, 0, 40, 452, 1, 0, 0, 0, 42, 459, 1, 0, 0, 0, 44, 466, 1, 0, 0, 0, 46, 474, 1, 0, 0, 0, 48, 483, 1, 0, 0, 0, 50, 489, 1, 0, 0, 0, 52, 506, 1, 0, 0, 0, 54, 522, 1, 0, 0, 0, 56, 528, 1, 0, 0, 0, 58, 533, 1, 0, 0, 0, 60, 538, 1, 0, 0, 0, 62, 542, 1, 0, 0, 0, 64, 546, 1, 0, 0, 0, 66, 550, 1, 0, 0, 0, 68, 554, 1, 0, 0, 0, 70, 556, 1, 0, 0, 0, 72, 558, 1, 0, 0, 0, 74, 561, 1, 0, 0, 0, 76, 563, 1, 0, 0, 0, 78, 572, 1, 0, 0, 0, 80, 574, 1, 0, 0, 0, 82, 579, 1, 0, 0, 0, 84, 581, 1, 0, 0, 0, 86, 586, 1, 0, 0, 0, 88, 617, 1, 0, 0, 0, 90, 620, 1, 0, 0, 0, 92, 666, 1, 0, 0, 0, 94, 668, 1, 0, 0, 0, 96, 671, 1, 0, 0, 0, 98, 675, 1, 0, 0, 0, 100, 679, 1, 0, 0, 0, 102, 681, 1, 0, 0, 0, 104, 684, 1, 0, 0, 0, 106, 686, 1, 0, 0, 0, 108, 691, 1, 0, 0, 0, 110, 693, 1, 0, 0, 0, 112, 699, 1, 0, 0, 0, 114, 705, 1, 0, 0, 0, 116, 710, 1, 0, 0, 0, 118, 712, 1, 0, 0, 0, 120, 715, 1, 0, 0, 0, 122, 718, 1, 0, 0, 0, 124, 723, 1, 0, 0, 0, 126, 727, 1, 0, 0, 0, 128, 732, 1, 0, 0, 0, 130, 738, 1, 0, 0, 0, 132, 741, 1, 0, 0, 0, 134, 743, 1, 0, 0, 0, 136, 749, 1, 0, 0, 0, 138, 751, 1, 0, 0, 0, 140, 
756, 1, 0, 0, 0, 142, 759, 1, 0, 0, 0, 144, 762, 1, 0, 0, 0, 146, 765, 1, 0, 0, 0, 148, 767, 1, 0, 0, 0, 150, 770, 1, 0, 0, 0, 152, 772, 1, 0, 0, 0, 154, 775, 1, 0, 0, 0, 156, 777, 1, 0, 0, 0, 158, 779, 1, 0, 0, 0, 160, 781, 1, 0, 0, 0, 162, 783, 1, 0, 0, 0, 164, 785, 1, 0, 0, 0, 166, 790, 1, 0, 0, 0, 168, 811, 1, 0, 0, 0, 170, 813, 1, 0, 0, 0, 172, 821, 1, 0, 0, 0, 174, 823, 1, 0, 0, 0, 176, 827, 1, 0, 0, 0, 178, 831, 1, 0, 0, 0, 180, 835, 1, 0, 0, 0, 182, 840, 1, 0, 0, 0, 184, 844, 1, 0, 0, 0, 186, 848, 1, 0, 0, 0, 188, 852, 1, 0, 0, 0, 190, 856, 1, 0, 0, 0, 192, 860, 1, 0, 0, 0, 194, 868, 1, 0, 0, 0, 196, 880, 1, 0, 0, 0, 198, 883, 1, 0, 0, 0, 200, 887, 1, 0, 0, 0, 202, 891, 1, 0, 0, 0, 204, 895, 1, 0, 0, 0, 206, 899, 1, 0, 0, 0, 208, 903, 1, 0, 0, 0, 210, 908, 1, 0, 0, 0, 212, 912, 1, 0, 0, 0, 214, 920, 1, 0, 0, 0, 216, 941, 1, 0, 0, 0, 218, 945, 1, 0, 0, 0, 220, 949, 1, 0, 0, 0, 222, 953, 1, 0, 0, 0, 224, 957, 1, 0, 0, 0, 226, 961, 1, 0, 0, 0, 228, 966, 1, 0, 0, 0, 230, 970, 1, 0, 0, 0, 232, 974, 1, 0, 0, 0, 234, 978, 1, 0, 0, 0, 236, 981, 1, 0, 0, 0, 238, 985, 1, 0, 0, 0, 240, 989, 1, 0, 0, 0, 242, 993, 1, 0, 0, 0, 244, 997, 1, 0, 0, 0, 246, 1002, 1, 0, 0, 0, 248, 1007, 1, 0, 0, 0, 250, 1012, 1, 0, 0, 0, 252, 1019, 1, 0, 0, 0, 254, 1028, 1, 0, 0, 0, 256, 1035, 1, 0, 0, 0, 258, 1039, 1, 0, 0, 0, 260, 1043, 1, 0, 0, 0, 262, 1047, 1, 0, 0, 0, 264, 1051, 1, 0, 0, 0, 266, 1055, 1, 0, 0, 0, 268, 1061, 1, 0, 0, 0, 270, 1065, 1, 0, 0, 0, 272, 1069, 1, 0, 0, 0, 274, 1073, 1, 0, 0, 0, 276, 1077, 1, 0, 0, 0, 278, 1081, 1, 0, 0, 0, 280, 1085, 1, 0, 0, 0, 282, 1089, 1, 0, 0, 0, 284, 1093, 1, 0, 0, 0, 286, 1097, 1, 0, 0, 0, 288, 1102, 1, 0, 0, 0, 290, 1106, 1, 0, 0, 0, 292, 1110, 1, 0, 0, 0, 294, 1114, 1, 0, 0, 0, 296, 1118, 1, 0, 0, 0, 298, 1122, 1, 0, 0, 0, 300, 1126, 1, 0, 0, 0, 302, 1131, 1, 0, 0, 0, 304, 1136, 1, 0, 0, 0, 306, 1140, 1, 0, 0, 0, 308, 1144, 1, 0, 0, 0, 310, 1148, 1, 0, 0, 0, 312, 1153, 1, 0, 0, 0, 314, 1163, 1, 0, 0, 0, 316, 1167, 1, 0, 0, 0, 318, 1171, 
1, 0, 0, 0, 320, 1175, 1, 0, 0, 0, 322, 1180, 1, 0, 0, 0, 324, 1187, 1, 0, 0, 0, 326, 1191, 1, 0, 0, 0, 328, 1195, 1, 0, 0, 0, 330, 1199, 1, 0, 0, 0, 332, 333, 5, 100, 0, 0, 333, 334, 5, 105, 0, 0, 334, 335, 5, 115, 0, 0, 335, 336, 5, 115, 0, 0, 336, 337, 5, 101, 0, 0, 337, 338, 5, 99, 0, 0, 338, 339, 5, 116, 0, 0, 339, 340, 1, 0, 0, 0, 340, 341, 6, 0, 0, 0, 341, 13, 1, 0, 0, 0, 342, 343, 5, 100, 0, 0, 343, 344, 5, 114, 0, 0, 344, 345, 5, 111, 0, 0, 345, 346, 5, 112, 0, 0, 346, 347, 1, 0, 0, 0, 347, 348, 6, 1, 1, 0, 348, 15, 1, 0, 0, 0, 349, 350, 5, 101, 0, 0, 350, 351, 5, 110, 0, 0, 351, 352, 5, 114, 0, 0, 352, 353, 5, 105, 0, 0, 353, 354, 5, 99, 0, 0, 354, 355, 5, 104, 0, 0, 355, 356, 1, 0, 0, 0, 356, 357, 6, 2, 2, 0, 357, 17, 1, 0, 0, 0, 358, 359, 5, 101, 0, 0, 359, 360, 5, 118, 0, 0, 360, 361, 5, 97, 0, 0, 361, 362, 5, 108, 0, 0, 362, 363, 1, 0, 0, 0, 363, 364, 6, 3, 0, 0, 364, 19, 1, 0, 0, 0, 365, 366, 5, 101, 0, 0, 366, 367, 5, 120, 0, 0, 367, 368, 5, 112, 0, 0, 368, 369, 5, 108, 0, 0, 369, 370, 5, 97, 0, 0, 370, 371, 5, 105, 0, 0, 371, 372, 5, 110, 0, 0, 372, 373, 1, 0, 0, 0, 373, 374, 6, 4, 3, 0, 374, 21, 1, 0, 0, 0, 375, 376, 5, 102, 0, 0, 376, 377, 5, 114, 0, 0, 377, 378, 5, 111, 0, 0, 378, 379, 5, 109, 0, 0, 379, 380, 1, 0, 0, 0, 380, 381, 6, 5, 4, 0, 381, 23, 1, 0, 0, 0, 382, 383, 5, 103, 0, 0, 383, 384, 5, 114, 0, 0, 384, 385, 5, 111, 0, 0, 385, 386, 5, 107, 0, 0, 386, 387, 1, 0, 0, 0, 387, 388, 6, 6, 0, 0, 388, 25, 1, 0, 0, 0, 389, 390, 5, 105, 0, 0, 390, 391, 5, 110, 0, 0, 391, 392, 5, 108, 0, 0, 392, 393, 5, 105, 0, 0, 393, 394, 5, 110, 0, 0, 394, 395, 5, 101, 0, 0, 395, 396, 5, 115, 0, 0, 396, 397, 5, 116, 0, 0, 397, 398, 5, 97, 0, 0, 398, 399, 5, 116, 0, 0, 399, 400, 5, 115, 0, 0, 400, 401, 1, 0, 0, 0, 401, 402, 6, 7, 0, 0, 402, 27, 1, 0, 0, 0, 403, 404, 5, 107, 0, 0, 404, 405, 5, 101, 0, 0, 405, 406, 5, 101, 0, 0, 406, 407, 5, 112, 0, 0, 407, 408, 1, 0, 0, 0, 408, 409, 6, 8, 1, 0, 409, 29, 1, 0, 0, 0, 410, 411, 5, 108, 0, 0, 411, 412, 5, 105, 0, 
0, 412, 413, 5, 109, 0, 0, 413, 414, 5, 105, 0, 0, 414, 415, 5, 116, 0, 0, 415, 416, 1, 0, 0, 0, 416, 417, 6, 9, 0, 0, 417, 31, 1, 0, 0, 0, 418, 419, 5, 109, 0, 0, 419, 420, 5, 101, 0, 0, 420, 421, 5, 116, 0, 0, 421, 422, 5, 97, 0, 0, 422, 423, 1, 0, 0, 0, 423, 424, 6, 10, 5, 0, 424, 33, 1, 0, 0, 0, 425, 426, 5, 109, 0, 0, 426, 427, 5, 118, 0, 0, 427, 428, 5, 95, 0, 0, 428, 429, 5, 101, 0, 0, 429, 430, 5, 120, 0, 0, 430, 431, 5, 112, 0, 0, 431, 432, 5, 97, 0, 0, 432, 433, 5, 110, 0, 0, 433, 434, 5, 100, 0, 0, 434, 435, 1, 0, 0, 0, 435, 436, 6, 11, 6, 0, 436, 35, 1, 0, 0, 0, 437, 438, 5, 114, 0, 0, 438, 439, 5, 101, 0, 0, 439, 440, 5, 110, 0, 0, 440, 441, 5, 97, 0, 0, 441, 442, 5, 109, 0, 0, 442, 443, 5, 101, 0, 0, 443, 444, 1, 0, 0, 0, 444, 445, 6, 12, 7, 0, 445, 37, 1, 0, 0, 0, 446, 447, 5, 114, 0, 0, 447, 448, 5, 111, 0, 0, 448, 449, 5, 119, 0, 0, 449, 450, 1, 0, 0, 0, 450, 451, 6, 13, 0, 0, 451, 39, 1, 0, 0, 0, 452, 453, 5, 115, 0, 0, 453, 454, 5, 104, 0, 0, 454, 455, 5, 111, 0, 0, 455, 456, 5, 119, 0, 0, 456, 457, 1, 0, 0, 0, 457, 458, 6, 14, 8, 0, 458, 41, 1, 0, 0, 0, 459, 460, 5, 115, 0, 0, 460, 461, 5, 111, 0, 0, 461, 462, 5, 114, 0, 0, 462, 463, 5, 116, 0, 0, 463, 464, 1, 0, 0, 0, 464, 465, 6, 15, 0, 0, 465, 43, 1, 0, 0, 0, 466, 467, 5, 115, 0, 0, 467, 468, 5, 116, 0, 0, 468, 469, 5, 97, 0, 0, 469, 470, 5, 116, 0, 0, 470, 471, 5, 115, 0, 0, 471, 472, 1, 0, 0, 0, 472, 473, 6, 16, 0, 0, 473, 45, 1, 0, 0, 0, 474, 475, 5, 119, 0, 0, 475, 476, 5, 104, 0, 0, 476, 477, 5, 101, 0, 0, 477, 478, 5, 114, 0, 0, 478, 479, 5, 101, 0, 0, 479, 480, 1, 0, 0, 0, 480, 481, 6, 17, 0, 0, 481, 47, 1, 0, 0, 0, 482, 484, 8, 0, 0, 0, 483, 482, 1, 0, 0, 0, 484, 485, 1, 0, 0, 0, 485, 483, 1, 0, 0, 0, 485, 486, 1, 0, 0, 0, 486, 487, 1, 0, 0, 0, 487, 488, 6, 18, 0, 0, 488, 49, 1, 0, 0, 0, 489, 490, 5, 47, 0, 0, 490, 491, 5, 47, 0, 0, 491, 495, 1, 0, 0, 0, 492, 494, 8, 1, 0, 0, 493, 492, 1, 0, 0, 0, 494, 497, 1, 0, 0, 0, 495, 493, 1, 0, 0, 0, 495, 496, 1, 0, 0, 0, 496, 499, 1, 0, 0, 0, 
497, 495, 1, 0, 0, 0, 498, 500, 5, 13, 0, 0, 499, 498, 1, 0, 0, 0, 499, 500, 1, 0, 0, 0, 500, 502, 1, 0, 0, 0, 501, 503, 5, 10, 0, 0, 502, 501, 1, 0, 0, 0, 502, 503, 1, 0, 0, 0, 503, 504, 1, 0, 0, 0, 504, 505, 6, 19, 9, 0, 505, 51, 1, 0, 0, 0, 506, 507, 5, 47, 0, 0, 507, 508, 5, 42, 0, 0, 508, 513, 1, 0, 0, 0, 509, 512, 3, 52, 20, 0, 510, 512, 9, 0, 0, 0, 511, 509, 1, 0, 0, 0, 511, 510, 1, 0, 0, 0, 512, 515, 1, 0, 0, 0, 513, 514, 1, 0, 0, 0, 513, 511, 1, 0, 0, 0, 514, 516, 1, 0, 0, 0, 515, 513, 1, 0, 0, 0, 516, 517, 5, 42, 0, 0, 517, 518, 5, 47, 0, 0, 518, 519, 1, 0, 0, 0, 519, 520, 6, 20, 9, 0, 520, 53, 1, 0, 0, 0, 521, 523, 7, 2, 0, 0, 522, 521, 1, 0, 0, 0, 523, 524, 1, 0, 0, 0, 524, 522, 1, 0, 0, 0, 524, 525, 1, 0, 0, 0, 525, 526, 1, 0, 0, 0, 526, 527, 6, 21, 9, 0, 527, 55, 1, 0, 0, 0, 528, 529, 3, 164, 76, 0, 529, 530, 1, 0, 0, 0, 530, 531, 6, 22, 10, 0, 531, 532, 6, 22, 11, 0, 532, 57, 1, 0, 0, 0, 533, 534, 3, 66, 27, 0, 534, 535, 1, 0, 0, 0, 535, 536, 6, 23, 12, 0, 536, 537, 6, 23, 13, 0, 537, 59, 1, 0, 0, 0, 538, 539, 3, 54, 21, 0, 539, 540, 1, 0, 0, 0, 540, 541, 6, 24, 9, 0, 541, 61, 1, 0, 0, 0, 542, 543, 3, 50, 19, 0, 543, 544, 1, 0, 0, 0, 544, 545, 6, 25, 9, 0, 545, 63, 1, 0, 0, 0, 546, 547, 3, 52, 20, 0, 547, 548, 1, 0, 0, 0, 548, 549, 6, 26, 9, 0, 549, 65, 1, 0, 0, 0, 550, 551, 5, 124, 0, 0, 551, 552, 1, 0, 0, 0, 552, 553, 6, 27, 13, 0, 553, 67, 1, 0, 0, 0, 554, 555, 7, 3, 0, 0, 555, 69, 1, 0, 0, 0, 556, 557, 7, 4, 0, 0, 557, 71, 1, 0, 0, 0, 558, 559, 5, 92, 0, 0, 559, 560, 7, 5, 0, 0, 560, 73, 1, 0, 0, 0, 561, 562, 8, 6, 0, 0, 562, 75, 1, 0, 0, 0, 563, 565, 7, 7, 0, 0, 564, 566, 7, 8, 0, 0, 565, 564, 1, 0, 0, 0, 565, 566, 1, 0, 0, 0, 566, 568, 1, 0, 0, 0, 567, 569, 3, 68, 28, 0, 568, 567, 1, 0, 0, 0, 569, 570, 1, 0, 0, 0, 570, 568, 1, 0, 0, 0, 570, 571, 1, 0, 0, 0, 571, 77, 1, 0, 0, 0, 572, 573, 5, 64, 0, 0, 573, 79, 1, 0, 0, 0, 574, 575, 5, 96, 0, 0, 575, 81, 1, 0, 0, 0, 576, 580, 8, 9, 0, 0, 577, 578, 5, 96, 0, 0, 578, 580, 5, 96, 0, 0, 579, 576, 1, 
0, 0, 0, 579, 577, 1, 0, 0, 0, 580, 83, 1, 0, 0, 0, 581, 582, 5, 95, 0, 0, 582, 85, 1, 0, 0, 0, 583, 587, 3, 70, 29, 0, 584, 587, 3, 68, 28, 0, 585, 587, 3, 84, 36, 0, 586, 583, 1, 0, 0, 0, 586, 584, 1, 0, 0, 0, 586, 585, 1, 0, 0, 0, 587, 87, 1, 0, 0, 0, 588, 593, 5, 34, 0, 0, 589, 592, 3, 72, 30, 0, 590, 592, 3, 74, 31, 0, 591, 589, 1, 0, 0, 0, 591, 590, 1, 0, 0, 0, 592, 595, 1, 0, 0, 0, 593, 591, 1, 0, 0, 0, 593, 594, 1, 0, 0, 0, 594, 596, 1, 0, 0, 0, 595, 593, 1, 0, 0, 0, 596, 618, 5, 34, 0, 0, 597, 598, 5, 34, 0, 0, 598, 599, 5, 34, 0, 0, 599, 600, 5, 34, 0, 0, 600, 604, 1, 0, 0, 0, 601, 603, 8, 1, 0, 0, 602, 601, 1, 0, 0, 0, 603, 606, 1, 0, 0, 0, 604, 605, 1, 0, 0, 0, 604, 602, 1, 0, 0, 0, 605, 607, 1, 0, 0, 0, 606, 604, 1, 0, 0, 0, 607, 608, 5, 34, 0, 0, 608, 609, 5, 34, 0, 0, 609, 610, 5, 34, 0, 0, 610, 612, 1, 0, 0, 0, 611, 613, 5, 34, 0, 0, 612, 611, 1, 0, 0, 0, 612, 613, 1, 0, 0, 0, 613, 615, 1, 0, 0, 0, 614, 616, 5, 34, 0, 0, 615, 614, 1, 0, 0, 0, 615, 616, 1, 0, 0, 0, 616, 618, 1, 0, 0, 0, 617, 588, 1, 0, 0, 0, 617, 597, 1, 0, 0, 0, 618, 89, 1, 0, 0, 0, 619, 621, 3, 68, 28, 0, 620, 619, 1, 0, 0, 0, 621, 622, 1, 0, 0, 0, 622, 620, 1, 0, 0, 0, 622, 623, 1, 0, 0, 0, 623, 91, 1, 0, 0, 0, 624, 626, 3, 68, 28, 0, 625, 624, 1, 0, 0, 0, 626, 627, 1, 0, 0, 0, 627, 625, 1, 0, 0, 0, 627, 628, 1, 0, 0, 0, 628, 629, 1, 0, 0, 0, 629, 633, 3, 108, 48, 0, 630, 632, 3, 68, 28, 0, 631, 630, 1, 0, 0, 0, 632, 635, 1, 0, 0, 0, 633, 631, 1, 0, 0, 0, 633, 634, 1, 0, 0, 0, 634, 667, 1, 0, 0, 0, 635, 633, 1, 0, 0, 0, 636, 638, 3, 108, 48, 0, 637, 639, 3, 68, 28, 0, 638, 637, 1, 0, 0, 0, 639, 640, 1, 0, 0, 0, 640, 638, 1, 0, 0, 0, 640, 641, 1, 0, 0, 0, 641, 667, 1, 0, 0, 0, 642, 644, 3, 68, 28, 0, 643, 642, 1, 0, 0, 0, 644, 645, 1, 0, 0, 0, 645, 643, 1, 0, 0, 0, 645, 646, 1, 0, 0, 0, 646, 654, 1, 0, 0, 0, 647, 651, 3, 108, 48, 0, 648, 650, 3, 68, 28, 0, 649, 648, 1, 0, 0, 0, 650, 653, 1, 0, 0, 0, 651, 649, 1, 0, 0, 0, 651, 652, 1, 0, 0, 0, 652, 655, 1, 0, 0, 0, 653, 651, 1, 0, 
0, 0, 654, 647, 1, 0, 0, 0, 654, 655, 1, 0, 0, 0, 655, 656, 1, 0, 0, 0, 656, 657, 3, 76, 32, 0, 657, 667, 1, 0, 0, 0, 658, 660, 3, 108, 48, 0, 659, 661, 3, 68, 28, 0, 660, 659, 1, 0, 0, 0, 661, 662, 1, 0, 0, 0, 662, 660, 1, 0, 0, 0, 662, 663, 1, 0, 0, 0, 663, 664, 1, 0, 0, 0, 664, 665, 3, 76, 32, 0, 665, 667, 1, 0, 0, 0, 666, 625, 1, 0, 0, 0, 666, 636, 1, 0, 0, 0, 666, 643, 1, 0, 0, 0, 666, 658, 1, 0, 0, 0, 667, 93, 1, 0, 0, 0, 668, 669, 5, 98, 0, 0, 669, 670, 5, 121, 0, 0, 670, 95, 1, 0, 0, 0, 671, 672, 5, 97, 0, 0, 672, 673, 5, 110, 0, 0, 673, 674, 5, 100, 0, 0, 674, 97, 1, 0, 0, 0, 675, 676, 5, 97, 0, 0, 676, 677, 5, 115, 0, 0, 677, 678, 5, 99, 0, 0, 678, 99, 1, 0, 0, 0, 679, 680, 5, 61, 0, 0, 680, 101, 1, 0, 0, 0, 681, 682, 5, 58, 0, 0, 682, 683, 5, 58, 0, 0, 683, 103, 1, 0, 0, 0, 684, 685, 5, 44, 0, 0, 685, 105, 1, 0, 0, 0, 686, 687, 5, 100, 0, 0, 687, 688, 5, 101, 0, 0, 688, 689, 5, 115, 0, 0, 689, 690, 5, 99, 0, 0, 690, 107, 1, 0, 0, 0, 691, 692, 5, 46, 0, 0, 692, 109, 1, 0, 0, 0, 693, 694, 5, 102, 0, 0, 694, 695, 5, 97, 0, 0, 695, 696, 5, 108, 0, 0, 696, 697, 5, 115, 0, 0, 697, 698, 5, 101, 0, 0, 698, 111, 1, 0, 0, 0, 699, 700, 5, 102, 0, 0, 700, 701, 5, 105, 0, 0, 701, 702, 5, 114, 0, 0, 702, 703, 5, 115, 0, 0, 703, 704, 5, 116, 0, 0, 704, 113, 1, 0, 0, 0, 705, 706, 5, 108, 0, 0, 706, 707, 5, 97, 0, 0, 707, 708, 5, 115, 0, 0, 708, 709, 5, 116, 0, 0, 709, 115, 1, 0, 0, 0, 710, 711, 5, 40, 0, 0, 711, 117, 1, 0, 0, 0, 712, 713, 5, 105, 0, 0, 713, 714, 5, 110, 0, 0, 714, 119, 1, 0, 0, 0, 715, 716, 5, 105, 0, 0, 716, 717, 5, 115, 0, 0, 717, 121, 1, 0, 0, 0, 718, 719, 5, 108, 0, 0, 719, 720, 5, 105, 0, 0, 720, 721, 5, 107, 0, 0, 721, 722, 5, 101, 0, 0, 722, 123, 1, 0, 0, 0, 723, 724, 5, 110, 0, 0, 724, 725, 5, 111, 0, 0, 725, 726, 5, 116, 0, 0, 726, 125, 1, 0, 0, 0, 727, 728, 5, 110, 0, 0, 728, 729, 5, 117, 0, 0, 729, 730, 5, 108, 0, 0, 730, 731, 5, 108, 0, 0, 731, 127, 1, 0, 0, 0, 732, 733, 5, 110, 0, 0, 733, 734, 5, 117, 0, 0, 734, 735, 5, 108, 0, 0, 735, 736, 
5, 108, 0, 0, 736, 737, 5, 115, 0, 0, 737, 129, 1, 0, 0, 0, 738, 739, 5, 111, 0, 0, 739, 740, 5, 114, 0, 0, 740, 131, 1, 0, 0, 0, 741, 742, 5, 63, 0, 0, 742, 133, 1, 0, 0, 0, 743, 744, 5, 114, 0, 0, 744, 745, 5, 108, 0, 0, 745, 746, 5, 105, 0, 0, 746, 747, 5, 107, 0, 0, 747, 748, 5, 101, 0, 0, 748, 135, 1, 0, 0, 0, 749, 750, 5, 41, 0, 0, 750, 137, 1, 0, 0, 0, 751, 752, 5, 116, 0, 0, 752, 753, 5, 114, 0, 0, 753, 754, 5, 117, 0, 0, 754, 755, 5, 101, 0, 0, 755, 139, 1, 0, 0, 0, 756, 757, 5, 61, 0, 0, 757, 758, 5, 61, 0, 0, 758, 141, 1, 0, 0, 0, 759, 760, 5, 61, 0, 0, 760, 761, 5, 126, 0, 0, 761, 143, 1, 0, 0, 0, 762, 763, 5, 33, 0, 0, 763, 764, 5, 61, 0, 0, 764, 145, 1, 0, 0, 0, 765, 766, 5, 60, 0, 0, 766, 147, 1, 0, 0, 0, 767, 768, 5, 60, 0, 0, 768, 769, 5, 61, 0, 0, 769, 149, 1, 0, 0, 0, 770, 771, 5, 62, 0, 0, 771, 151, 1, 0, 0, 0, 772, 773, 5, 62, 0, 0, 773, 774, 5, 61, 0, 0, 774, 153, 1, 0, 0, 0, 775, 776, 5, 43, 0, 0, 776, 155, 1, 0, 0, 0, 777, 778, 5, 45, 0, 0, 778, 157, 1, 0, 0, 0, 779, 780, 5, 42, 0, 0, 780, 159, 1, 0, 0, 0, 781, 782, 5, 47, 0, 0, 782, 161, 1, 0, 0, 0, 783, 784, 5, 37, 0, 0, 784, 163, 1, 0, 0, 0, 785, 786, 5, 91, 0, 0, 786, 787, 1, 0, 0, 0, 787, 788, 6, 76, 0, 0, 788, 789, 6, 76, 0, 0, 789, 165, 1, 0, 0, 0, 790, 791, 5, 93, 0, 0, 791, 792, 1, 0, 0, 0, 792, 793, 6, 77, 13, 0, 793, 794, 6, 77, 13, 0, 794, 167, 1, 0, 0, 0, 795, 799, 3, 70, 29, 0, 796, 798, 3, 86, 37, 0, 797, 796, 1, 0, 0, 0, 798, 801, 1, 0, 0, 0, 799, 797, 1, 0, 0, 0, 799, 800, 1, 0, 0, 0, 800, 812, 1, 0, 0, 0, 801, 799, 1, 0, 0, 0, 802, 805, 3, 84, 36, 0, 803, 805, 3, 78, 33, 0, 804, 802, 1, 0, 0, 0, 804, 803, 1, 0, 0, 0, 805, 807, 1, 0, 0, 0, 806, 808, 3, 86, 37, 0, 807, 806, 1, 0, 0, 0, 808, 809, 1, 0, 0, 0, 809, 807, 1, 0, 0, 0, 809, 810, 1, 0, 0, 0, 810, 812, 1, 0, 0, 0, 811, 795, 1, 0, 0, 0, 811, 804, 1, 0, 0, 0, 812, 169, 1, 0, 0, 0, 813, 815, 3, 80, 34, 0, 814, 816, 3, 82, 35, 0, 815, 814, 1, 0, 0, 0, 816, 817, 1, 0, 0, 0, 817, 815, 1, 0, 0, 0, 817, 818, 1, 0, 0, 0, 818, 
819, 1, 0, 0, 0, 819, 820, 3, 80, 34, 0, 820, 171, 1, 0, 0, 0, 821, 822, 3, 170, 79, 0, 822, 173, 1, 0, 0, 0, 823, 824, 3, 50, 19, 0, 824, 825, 1, 0, 0, 0, 825, 826, 6, 81, 9, 0, 826, 175, 1, 0, 0, 0, 827, 828, 3, 52, 20, 0, 828, 829, 1, 0, 0, 0, 829, 830, 6, 82, 9, 0, 830, 177, 1, 0, 0, 0, 831, 832, 3, 54, 21, 0, 832, 833, 1, 0, 0, 0, 833, 834, 6, 83, 9, 0, 834, 179, 1, 0, 0, 0, 835, 836, 3, 66, 27, 0, 836, 837, 1, 0, 0, 0, 837, 838, 6, 84, 12, 0, 838, 839, 6, 84, 13, 0, 839, 181, 1, 0, 0, 0, 840, 841, 3, 164, 76, 0, 841, 842, 1, 0, 0, 0, 842, 843, 6, 85, 10, 0, 843, 183, 1, 0, 0, 0, 844, 845, 3, 166, 77, 0, 845, 846, 1, 0, 0, 0, 846, 847, 6, 86, 14, 0, 847, 185, 1, 0, 0, 0, 848, 849, 3, 104, 46, 0, 849, 850, 1, 0, 0, 0, 850, 851, 6, 87, 15, 0, 851, 187, 1, 0, 0, 0, 852, 853, 3, 100, 44, 0, 853, 854, 1, 0, 0, 0, 854, 855, 6, 88, 16, 0, 855, 189, 1, 0, 0, 0, 856, 857, 3, 88, 38, 0, 857, 858, 1, 0, 0, 0, 858, 859, 6, 89, 17, 0, 859, 191, 1, 0, 0, 0, 860, 861, 5, 111, 0, 0, 861, 862, 5, 112, 0, 0, 862, 863, 5, 116, 0, 0, 863, 864, 5, 105, 0, 0, 864, 865, 5, 111, 0, 0, 865, 866, 5, 110, 0, 0, 866, 867, 5, 115, 0, 0, 867, 193, 1, 0, 0, 0, 868, 869, 5, 109, 0, 0, 869, 870, 5, 101, 0, 0, 870, 871, 5, 116, 0, 0, 871, 872, 5, 97, 0, 0, 872, 873, 5, 100, 0, 0, 873, 874, 5, 97, 0, 0, 874, 875, 5, 116, 0, 0, 875, 876, 5, 97, 0, 0, 876, 195, 1, 0, 0, 0, 877, 881, 8, 10, 0, 0, 878, 879, 5, 47, 0, 0, 879, 881, 8, 11, 0, 0, 880, 877, 1, 0, 0, 0, 880, 878, 1, 0, 0, 0, 881, 197, 1, 0, 0, 0, 882, 884, 3, 196, 92, 0, 883, 882, 1, 0, 0, 0, 884, 885, 1, 0, 0, 0, 885, 883, 1, 0, 0, 0, 885, 886, 1, 0, 0, 0, 886, 199, 1, 0, 0, 0, 887, 888, 3, 172, 80, 0, 888, 889, 1, 0, 0, 0, 889, 890, 6, 94, 18, 0, 890, 201, 1, 0, 0, 0, 891, 892, 3, 50, 19, 0, 892, 893, 1, 0, 0, 0, 893, 894, 6, 95, 9, 0, 894, 203, 1, 0, 0, 0, 895, 896, 3, 52, 20, 0, 896, 897, 1, 0, 0, 0, 897, 898, 6, 96, 9, 0, 898, 205, 1, 0, 0, 0, 899, 900, 3, 54, 21, 0, 900, 901, 1, 0, 0, 0, 901, 902, 6, 97, 9, 0, 902, 207, 1, 0, 0, 0, 
903, 904, 3, 66, 27, 0, 904, 905, 1, 0, 0, 0, 905, 906, 6, 98, 12, 0, 906, 907, 6, 98, 13, 0, 907, 209, 1, 0, 0, 0, 908, 909, 3, 108, 48, 0, 909, 910, 1, 0, 0, 0, 910, 911, 6, 99, 19, 0, 911, 211, 1, 0, 0, 0, 912, 913, 3, 104, 46, 0, 913, 914, 1, 0, 0, 0, 914, 915, 6, 100, 15, 0, 915, 213, 1, 0, 0, 0, 916, 921, 3, 70, 29, 0, 917, 921, 3, 68, 28, 0, 918, 921, 3, 84, 36, 0, 919, 921, 3, 158, 73, 0, 920, 916, 1, 0, 0, 0, 920, 917, 1, 0, 0, 0, 920, 918, 1, 0, 0, 0, 920, 919, 1, 0, 0, 0, 921, 215, 1, 0, 0, 0, 922, 925, 3, 70, 29, 0, 923, 925, 3, 158, 73, 0, 924, 922, 1, 0, 0, 0, 924, 923, 1, 0, 0, 0, 925, 929, 1, 0, 0, 0, 926, 928, 3, 214, 101, 0, 927, 926, 1, 0, 0, 0, 928, 931, 1, 0, 0, 0, 929, 927, 1, 0, 0, 0, 929, 930, 1, 0, 0, 0, 930, 942, 1, 0, 0, 0, 931, 929, 1, 0, 0, 0, 932, 935, 3, 84, 36, 0, 933, 935, 3, 78, 33, 0, 934, 932, 1, 0, 0, 0, 934, 933, 1, 0, 0, 0, 935, 937, 1, 0, 0, 0, 936, 938, 3, 214, 101, 0, 937, 936, 1, 0, 0, 0, 938, 939, 1, 0, 0, 0, 939, 937, 1, 0, 0, 0, 939, 940, 1, 0, 0, 0, 940, 942, 1, 0, 0, 0, 941, 924, 1, 0, 0, 0, 941, 934, 1, 0, 0, 0, 942, 217, 1, 0, 0, 0, 943, 946, 3, 216, 102, 0, 944, 946, 3, 170, 79, 0, 945, 943, 1, 0, 0, 0, 945, 944, 1, 0, 0, 0, 946, 947, 1, 0, 0, 0, 947, 945, 1, 0, 0, 0, 947, 948, 1, 0, 0, 0, 948, 219, 1, 0, 0, 0, 949, 950, 3, 50, 19, 0, 950, 951, 1, 0, 0, 0, 951, 952, 6, 104, 9, 0, 952, 221, 1, 0, 0, 0, 953, 954, 3, 52, 20, 0, 954, 955, 1, 0, 0, 0, 955, 956, 6, 105, 9, 0, 956, 223, 1, 0, 0, 0, 957, 958, 3, 54, 21, 0, 958, 959, 1, 0, 0, 0, 959, 960, 6, 106, 9, 0, 960, 225, 1, 0, 0, 0, 961, 962, 3, 66, 27, 0, 962, 963, 1, 0, 0, 0, 963, 964, 6, 107, 12, 0, 964, 965, 6, 107, 13, 0, 965, 227, 1, 0, 0, 0, 966, 967, 3, 100, 44, 0, 967, 968, 1, 0, 0, 0, 968, 969, 6, 108, 16, 0, 969, 229, 1, 0, 0, 0, 970, 971, 3, 104, 46, 0, 971, 972, 1, 0, 0, 0, 972, 973, 6, 109, 15, 0, 973, 231, 1, 0, 0, 0, 974, 975, 3, 108, 48, 0, 975, 976, 1, 0, 0, 0, 976, 977, 6, 110, 19, 0, 977, 233, 1, 0, 0, 0, 978, 979, 5, 97, 0, 0, 979, 980, 5, 115, 
0, 0, 980, 235, 1, 0, 0, 0, 981, 982, 3, 218, 103, 0, 982, 983, 1, 0, 0, 0, 983, 984, 6, 112, 20, 0, 984, 237, 1, 0, 0, 0, 985, 986, 3, 50, 19, 0, 986, 987, 1, 0, 0, 0, 987, 988, 6, 113, 9, 0, 988, 239, 1, 0, 0, 0, 989, 990, 3, 52, 20, 0, 990, 991, 1, 0, 0, 0, 991, 992, 6, 114, 9, 0, 992, 241, 1, 0, 0, 0, 993, 994, 3, 54, 21, 0, 994, 995, 1, 0, 0, 0, 995, 996, 6, 115, 9, 0, 996, 243, 1, 0, 0, 0, 997, 998, 3, 66, 27, 0, 998, 999, 1, 0, 0, 0, 999, 1000, 6, 116, 12, 0, 1000, 1001, 6, 116, 13, 0, 1001, 245, 1, 0, 0, 0, 1002, 1003, 3, 164, 76, 0, 1003, 1004, 1, 0, 0, 0, 1004, 1005, 6, 117, 10, 0, 1005, 1006, 6, 117, 21, 0, 1006, 247, 1, 0, 0, 0, 1007, 1008, 5, 111, 0, 0, 1008, 1009, 5, 110, 0, 0, 1009, 1010, 1, 0, 0, 0, 1010, 1011, 6, 118, 22, 0, 1011, 249, 1, 0, 0, 0, 1012, 1013, 5, 119, 0, 0, 1013, 1014, 5, 105, 0, 0, 1014, 1015, 5, 116, 0, 0, 1015, 1016, 5, 104, 0, 0, 1016, 1017, 1, 0, 0, 0, 1017, 1018, 6, 119, 22, 0, 1018, 251, 1, 0, 0, 0, 1019, 1020, 8, 12, 0, 0, 1020, 253, 1, 0, 0, 0, 1021, 1023, 3, 252, 120, 0, 1022, 1021, 1, 0, 0, 0, 1023, 1024, 1, 0, 0, 0, 1024, 1022, 1, 0, 0, 0, 1024, 1025, 1, 0, 0, 0, 1025, 1026, 1, 0, 0, 0, 1026, 1027, 3, 322, 155, 0, 1027, 1029, 1, 0, 0, 0, 1028, 1022, 1, 0, 0, 0, 1028, 1029, 1, 0, 0, 0, 1029, 1031, 1, 0, 0, 0, 1030, 1032, 3, 252, 120, 0, 1031, 1030, 1, 0, 0, 0, 1032, 1033, 1, 0, 0, 0, 1033, 1031, 1, 0, 0, 0, 1033, 1034, 1, 0, 0, 0, 1034, 255, 1, 0, 0, 0, 1035, 1036, 3, 172, 80, 0, 1036, 1037, 1, 0, 0, 0, 1037, 1038, 6, 122, 18, 0, 1038, 257, 1, 0, 0, 0, 1039, 1040, 3, 254, 121, 0, 1040, 1041, 1, 0, 0, 0, 1041, 1042, 6, 123, 23, 0, 1042, 259, 1, 0, 0, 0, 1043, 1044, 3, 50, 19, 0, 1044, 1045, 1, 0, 0, 0, 1045, 1046, 6, 124, 9, 0, 1046, 261, 1, 0, 0, 0, 1047, 1048, 3, 52, 20, 0, 1048, 1049, 1, 0, 0, 0, 1049, 1050, 6, 125, 9, 0, 1050, 263, 1, 0, 0, 0, 1051, 1052, 3, 54, 21, 0, 1052, 1053, 1, 0, 0, 0, 1053, 1054, 6, 126, 9, 0, 1054, 265, 1, 0, 0, 0, 1055, 1056, 3, 66, 27, 0, 1056, 1057, 1, 0, 0, 0, 1057, 1058, 6, 127, 12, 0, 
1058, 1059, 6, 127, 13, 0, 1059, 1060, 6, 127, 13, 0, 1060, 267, 1, 0, 0, 0, 1061, 1062, 3, 100, 44, 0, 1062, 1063, 1, 0, 0, 0, 1063, 1064, 6, 128, 16, 0, 1064, 269, 1, 0, 0, 0, 1065, 1066, 3, 104, 46, 0, 1066, 1067, 1, 0, 0, 0, 1067, 1068, 6, 129, 15, 0, 1068, 271, 1, 0, 0, 0, 1069, 1070, 3, 108, 48, 0, 1070, 1071, 1, 0, 0, 0, 1071, 1072, 6, 130, 19, 0, 1072, 273, 1, 0, 0, 0, 1073, 1074, 3, 250, 119, 0, 1074, 1075, 1, 0, 0, 0, 1075, 1076, 6, 131, 24, 0, 1076, 275, 1, 0, 0, 0, 1077, 1078, 3, 218, 103, 0, 1078, 1079, 1, 0, 0, 0, 1079, 1080, 6, 132, 20, 0, 1080, 277, 1, 0, 0, 0, 1081, 1082, 3, 172, 80, 0, 1082, 1083, 1, 0, 0, 0, 1083, 1084, 6, 133, 18, 0, 1084, 279, 1, 0, 0, 0, 1085, 1086, 3, 50, 19, 0, 1086, 1087, 1, 0, 0, 0, 1087, 1088, 6, 134, 9, 0, 1088, 281, 1, 0, 0, 0, 1089, 1090, 3, 52, 20, 0, 1090, 1091, 1, 0, 0, 0, 1091, 1092, 6, 135, 9, 0, 1092, 283, 1, 0, 0, 0, 1093, 1094, 3, 54, 21, 0, 1094, 1095, 1, 0, 0, 0, 1095, 1096, 6, 136, 9, 0, 1096, 285, 1, 0, 0, 0, 1097, 1098, 3, 66, 27, 0, 1098, 1099, 1, 0, 0, 0, 1099, 1100, 6, 137, 12, 0, 1100, 1101, 6, 137, 13, 0, 1101, 287, 1, 0, 0, 0, 1102, 1103, 3, 108, 48, 0, 1103, 1104, 1, 0, 0, 0, 1104, 1105, 6, 138, 19, 0, 1105, 289, 1, 0, 0, 0, 1106, 1107, 3, 172, 80, 0, 1107, 1108, 1, 0, 0, 0, 1108, 1109, 6, 139, 18, 0, 1109, 291, 1, 0, 0, 0, 1110, 1111, 3, 168, 78, 0, 1111, 1112, 1, 0, 0, 0, 1112, 1113, 6, 140, 25, 0, 1113, 293, 1, 0, 0, 0, 1114, 1115, 3, 50, 19, 0, 1115, 1116, 1, 0, 0, 0, 1116, 1117, 6, 141, 9, 0, 1117, 295, 1, 0, 0, 0, 1118, 1119, 3, 52, 20, 0, 1119, 1120, 1, 0, 0, 0, 1120, 1121, 6, 142, 9, 0, 1121, 297, 1, 0, 0, 0, 1122, 1123, 3, 54, 21, 0, 1123, 1124, 1, 0, 0, 0, 1124, 1125, 6, 143, 9, 0, 1125, 299, 1, 0, 0, 0, 1126, 1127, 3, 66, 27, 0, 1127, 1128, 1, 0, 0, 0, 1128, 1129, 6, 144, 12, 0, 1129, 1130, 6, 144, 13, 0, 1130, 301, 1, 0, 0, 0, 1131, 1132, 5, 105, 0, 0, 1132, 1133, 5, 110, 0, 0, 1133, 1134, 5, 102, 0, 0, 1134, 1135, 5, 111, 0, 0, 1135, 303, 1, 0, 0, 0, 1136, 1137, 3, 50, 19, 0, 1137, 
1138, 1, 0, 0, 0, 1138, 1139, 6, 146, 9, 0, 1139, 305, 1, 0, 0, 0, 1140, 1141, 3, 52, 20, 0, 1141, 1142, 1, 0, 0, 0, 1142, 1143, 6, 147, 9, 0, 1143, 307, 1, 0, 0, 0, 1144, 1145, 3, 54, 21, 0, 1145, 1146, 1, 0, 0, 0, 1146, 1147, 6, 148, 9, 0, 1147, 309, 1, 0, 0, 0, 1148, 1149, 3, 66, 27, 0, 1149, 1150, 1, 0, 0, 0, 1150, 1151, 6, 149, 12, 0, 1151, 1152, 6, 149, 13, 0, 1152, 311, 1, 0, 0, 0, 1153, 1154, 5, 102, 0, 0, 1154, 1155, 5, 117, 0, 0, 1155, 1156, 5, 110, 0, 0, 1156, 1157, 5, 99, 0, 0, 1157, 1158, 5, 116, 0, 0, 1158, 1159, 5, 105, 0, 0, 1159, 1160, 5, 111, 0, 0, 1160, 1161, 5, 110, 0, 0, 1161, 1162, 5, 115, 0, 0, 1162, 313, 1, 0, 0, 0, 1163, 1164, 3, 50, 19, 0, 1164, 1165, 1, 0, 0, 0, 1165, 1166, 6, 151, 9, 0, 1166, 315, 1, 0, 0, 0, 1167, 1168, 3, 52, 20, 0, 1168, 1169, 1, 0, 0, 0, 1169, 1170, 6, 152, 9, 0, 1170, 317, 1, 0, 0, 0, 1171, 1172, 3, 54, 21, 0, 1172, 1173, 1, 0, 0, 0, 1173, 1174, 6, 153, 9, 0, 1174, 319, 1, 0, 0, 0, 1175, 1176, 3, 166, 77, 0, 1176, 1177, 1, 0, 0, 0, 1177, 1178, 6, 154, 14, 0, 1178, 1179, 6, 154, 13, 0, 1179, 321, 1, 0, 0, 0, 1180, 1181, 5, 58, 0, 0, 1181, 323, 1, 0, 0, 0, 1182, 1188, 3, 78, 33, 0, 1183, 1188, 3, 68, 28, 0, 1184, 1188, 3, 108, 48, 0, 1185, 1188, 3, 70, 29, 0, 1186, 1188, 3, 84, 36, 0, 1187, 1182, 1, 0, 0, 0, 1187, 1183, 1, 0, 0, 0, 1187, 1184, 1, 0, 0, 0, 1187, 1185, 1, 0, 0, 0, 1187, 1186, 1, 0, 0, 0, 1188, 1189, 1, 0, 0, 0, 1189, 1187, 1, 0, 0, 0, 1189, 1190, 1, 0, 0, 0, 1190, 325, 1, 0, 0, 0, 1191, 1192, 3, 50, 19, 0, 1192, 1193, 1, 0, 0, 0, 1193, 1194, 6, 157, 9, 0, 1194, 327, 1, 0, 0, 0, 1195, 1196, 3, 52, 20, 0, 1196, 1197, 1, 0, 0, 0, 1197, 1198, 6, 158, 9, 0, 1198, 329, 1, 0, 0, 0, 1199, 1200, 3, 54, 21, 0, 1200, 1201, 1, 0, 0, 0, 1201, 1202, 6, 159, 9, 0, 1202, 331, 1, 0, 0, 0, 58, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 485, 495, 499, 502, 511, 513, 524, 565, 570, 579, 586, 591, 593, 604, 612, 615, 617, 622, 627, 633, 640, 645, 651, 654, 662, 666, 799, 804, 809, 811, 817, 880, 885, 920, 924, 929, 934, 939, 
941, 945, 947, 1024, 1028, 1033, 1187, 1189, 26, 5, 2, 0, 5, 4, 0, 5, 6, 0, 5, 1, 0, 5, 3, 0, 5, 10, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 0, 1, 0, 7, 65, 0, 5, 0, 0, 7, 26, 0, 4, 0, 0, 7, 66, 0, 7, 35, 0, 7, 33, 0, 7, 27, 0, 7, 68, 0, 7, 37, 0, 7, 78, 0, 5, 11, 0, 5, 7, 0, 7, 88, 0, 7, 87, 0, 7, 67, 0] \ No newline at end of file +[4, 0, 117, 1307, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 
2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 
1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 4, 19, 524, 8, 19, 11, 19, 12, 19, 525, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 534, 8, 20, 10, 20, 12, 20, 537, 9, 20, 1, 20, 3, 20, 540, 8, 20, 1, 20, 3, 20, 543, 8, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 5, 21, 552, 8, 21, 10, 21, 12, 21, 555, 9, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 4, 22, 563, 8, 22, 11, 22, 12, 22, 564, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 3, 23, 572, 8, 23, 1, 24, 4, 24, 575, 8, 24, 11, 24, 12, 24, 576, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 35, 1, 35, 3, 35, 616, 8, 35, 1, 35, 4, 35, 619, 8, 35, 11, 35, 12, 35, 620, 1, 36, 1, 36, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 3, 38, 630, 8, 38, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 3, 40, 637, 8, 40, 1, 41, 1, 41, 1, 41, 5, 41, 642, 8, 41, 10, 41, 12, 41, 645, 9, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 5, 41, 653, 8, 41, 10, 41, 12, 41, 656, 9, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 3, 41, 663, 8, 41, 1, 41, 3, 41, 666, 8, 41, 3, 41, 668, 8, 41, 1, 42, 4, 42, 671, 8, 42, 11, 42, 12, 42, 672, 1, 43, 4, 43, 676, 8, 43, 11, 43, 12, 43, 677, 1, 43, 1, 43, 5, 43, 682, 8, 43, 10, 43, 12, 43, 685, 9, 43, 1, 43, 1, 43, 4, 43, 689, 8, 43, 11, 43, 12, 43, 690, 1, 43, 4, 43, 694, 8, 43, 11, 43, 12, 43, 695, 1, 43, 1, 43, 5, 43, 700, 8, 43, 10, 43, 12, 43, 703, 9, 43, 3, 43, 705, 8, 43, 1, 43, 1, 43, 1, 43, 1, 43, 4, 43, 711, 8, 43, 11, 43, 12, 43, 712, 1, 43, 1, 43, 3, 43, 717, 8, 43, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 
1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 77, 1, 77, 1, 78, 1, 78, 1, 79, 1, 79, 1, 79, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 5, 81, 848, 8, 81, 10, 81, 12, 81, 851, 9, 81, 1, 81, 1, 81, 3, 81, 855, 8, 81, 1, 81, 4, 81, 858, 8, 81, 11, 81, 12, 81, 859, 3, 81, 862, 8, 81, 1, 82, 1, 82, 4, 82, 866, 8, 82, 11, 82, 12, 82, 867, 1, 82, 1, 82, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 3, 102, 961, 8, 102, 1, 103, 1, 103, 3, 103, 965, 8, 103, 1, 103, 5, 103, 968, 8, 103, 10, 103, 12, 103, 971, 9, 103, 1, 103, 1, 103, 3, 103, 975, 8, 103, 1, 103, 4, 103, 978, 8, 103, 11, 103, 12, 103, 979, 3, 103, 982, 8, 103, 1, 104, 1, 104, 4, 104, 986, 8, 104, 11, 104, 12, 104, 987, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 
114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 122, 4, 122, 1063, 8, 122, 11, 122, 12, 122, 1064, 1, 122, 1, 122, 3, 122, 1069, 8, 122, 1, 122, 4, 122, 1072, 8, 122, 11, 122, 12, 122, 1073, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 151, 1, 151, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 157, 4, 157, 1228, 8, 157, 11, 157, 12, 157, 1229, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 166, 1, 
166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 1, 170, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 173, 2, 553, 654, 0, 174, 14, 1, 16, 2, 18, 3, 20, 4, 22, 5, 24, 6, 26, 7, 28, 8, 30, 9, 32, 10, 34, 11, 36, 12, 38, 13, 40, 14, 42, 15, 44, 16, 46, 17, 48, 18, 50, 19, 52, 20, 54, 21, 56, 22, 58, 23, 60, 0, 62, 24, 64, 0, 66, 0, 68, 25, 70, 26, 72, 27, 74, 28, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 0, 90, 0, 92, 0, 94, 0, 96, 29, 98, 30, 100, 31, 102, 32, 104, 33, 106, 34, 108, 35, 110, 36, 112, 37, 114, 38, 116, 39, 118, 40, 120, 41, 122, 42, 124, 43, 126, 44, 128, 45, 130, 46, 132, 47, 134, 48, 136, 49, 138, 50, 140, 51, 142, 52, 144, 53, 146, 54, 148, 55, 150, 56, 152, 57, 154, 58, 156, 59, 158, 60, 160, 61, 162, 62, 164, 63, 166, 64, 168, 65, 170, 66, 172, 67, 174, 68, 176, 69, 178, 0, 180, 70, 182, 71, 184, 72, 186, 73, 188, 0, 190, 0, 192, 0, 194, 0, 196, 0, 198, 0, 200, 74, 202, 75, 204, 0, 206, 76, 208, 77, 210, 78, 212, 0, 214, 0, 216, 0, 218, 0, 220, 0, 222, 79, 224, 80, 226, 81, 228, 82, 230, 0, 232, 0, 234, 0, 236, 0, 238, 83, 240, 0, 242, 84, 244, 85, 246, 86, 248, 0, 250, 0, 252, 87, 254, 88, 256, 0, 258, 89, 260, 0, 262, 0, 264, 90, 266, 91, 268, 92, 270, 0, 272, 0, 274, 0, 276, 0, 278, 0, 280, 0, 282, 0, 284, 93, 286, 94, 288, 95, 290, 0, 292, 0, 294, 0, 296, 0, 298, 96, 300, 97, 302, 98, 304, 0, 306, 99, 308, 100, 310, 101, 312, 102, 314, 0, 316, 103, 318, 104, 320, 105, 322, 106, 324, 0, 326, 107, 328, 108, 330, 109, 332, 110, 334, 111, 336, 0, 338, 0, 340, 112, 342, 113, 344, 114, 346, 0, 348, 115, 350, 116, 352, 117, 354, 0, 356, 0, 358, 0, 360, 0, 14, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 13, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 10, 0, 9, 10, 13, 13, 32, 32, 44, 44, 47, 47, 61, 61, 91, 
91, 93, 93, 96, 96, 124, 124, 2, 0, 42, 42, 47, 47, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 5, 0, 34, 34, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1332, 0, 14, 1, 0, 0, 0, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 0, 56, 1, 0, 0, 0, 0, 58, 1, 0, 0, 0, 0, 62, 1, 0, 0, 0, 1, 64, 1, 0, 0, 0, 1, 66, 1, 0, 0, 0, 1, 68, 1, 0, 0, 0, 1, 70, 1, 0, 0, 0, 1, 72, 1, 0, 0, 0, 2, 74, 1, 0, 0, 0, 2, 96, 1, 0, 0, 0, 2, 98, 1, 0, 0, 0, 2, 100, 1, 0, 0, 0, 2, 102, 1, 0, 0, 0, 2, 104, 1, 0, 0, 0, 2, 106, 1, 0, 0, 0, 2, 108, 1, 0, 0, 0, 2, 110, 1, 0, 0, 0, 2, 112, 1, 0, 0, 0, 2, 114, 1, 0, 0, 0, 2, 116, 1, 0, 0, 0, 2, 118, 1, 0, 0, 0, 2, 120, 1, 0, 0, 0, 2, 122, 1, 0, 0, 0, 2, 124, 1, 0, 0, 0, 2, 126, 1, 0, 0, 0, 2, 128, 1, 0, 0, 0, 2, 130, 1, 0, 0, 0, 2, 132, 1, 0, 0, 0, 2, 134, 1, 0, 0, 0, 2, 136, 1, 0, 0, 0, 2, 138, 1, 0, 0, 0, 2, 140, 1, 0, 0, 0, 2, 142, 1, 0, 0, 0, 2, 144, 1, 0, 0, 0, 2, 146, 1, 0, 0, 0, 2, 148, 1, 0, 0, 0, 2, 150, 1, 0, 0, 0, 2, 152, 1, 0, 0, 0, 2, 154, 1, 0, 0, 0, 2, 156, 1, 0, 0, 0, 2, 158, 1, 0, 0, 0, 2, 160, 1, 0, 0, 0, 2, 162, 1, 0, 0, 0, 2, 164, 1, 0, 0, 0, 2, 166, 1, 0, 0, 0, 2, 168, 1, 0, 0, 0, 2, 170, 1, 0, 0, 0, 2, 172, 1, 0, 0, 0, 2, 174, 1, 0, 0, 0, 2, 176, 1, 0, 0, 0, 2, 180, 1, 0, 0, 0, 2, 182, 1, 0, 0, 0, 2, 184, 1, 0, 0, 0, 2, 186, 1, 0, 0, 0, 3, 188, 1, 0, 0, 0, 3, 190, 1, 0, 0, 0, 3, 192, 1, 0, 0, 0, 3, 194, 1, 0, 0, 0, 3, 196, 1, 0, 0, 0, 3, 198, 1, 0, 0, 0, 3, 200, 1, 0, 0, 0, 3, 202, 1, 0, 0, 0, 3, 204, 1, 0, 0, 0, 3, 206, 1, 
0, 0, 0, 3, 208, 1, 0, 0, 0, 3, 210, 1, 0, 0, 0, 4, 212, 1, 0, 0, 0, 4, 214, 1, 0, 0, 0, 4, 216, 1, 0, 0, 0, 4, 222, 1, 0, 0, 0, 4, 224, 1, 0, 0, 0, 4, 226, 1, 0, 0, 0, 4, 228, 1, 0, 0, 0, 5, 230, 1, 0, 0, 0, 5, 232, 1, 0, 0, 0, 5, 234, 1, 0, 0, 0, 5, 236, 1, 0, 0, 0, 5, 238, 1, 0, 0, 0, 5, 240, 1, 0, 0, 0, 5, 242, 1, 0, 0, 0, 5, 244, 1, 0, 0, 0, 5, 246, 1, 0, 0, 0, 6, 248, 1, 0, 0, 0, 6, 250, 1, 0, 0, 0, 6, 252, 1, 0, 0, 0, 6, 254, 1, 0, 0, 0, 6, 258, 1, 0, 0, 0, 6, 260, 1, 0, 0, 0, 6, 262, 1, 0, 0, 0, 6, 264, 1, 0, 0, 0, 6, 266, 1, 0, 0, 0, 6, 268, 1, 0, 0, 0, 7, 270, 1, 0, 0, 0, 7, 272, 1, 0, 0, 0, 7, 274, 1, 0, 0, 0, 7, 276, 1, 0, 0, 0, 7, 278, 1, 0, 0, 0, 7, 280, 1, 0, 0, 0, 7, 282, 1, 0, 0, 0, 7, 284, 1, 0, 0, 0, 7, 286, 1, 0, 0, 0, 7, 288, 1, 0, 0, 0, 8, 290, 1, 0, 0, 0, 8, 292, 1, 0, 0, 0, 8, 294, 1, 0, 0, 0, 8, 296, 1, 0, 0, 0, 8, 298, 1, 0, 0, 0, 8, 300, 1, 0, 0, 0, 8, 302, 1, 0, 0, 0, 9, 304, 1, 0, 0, 0, 9, 306, 1, 0, 0, 0, 9, 308, 1, 0, 0, 0, 9, 310, 1, 0, 0, 0, 9, 312, 1, 0, 0, 0, 10, 314, 1, 0, 0, 0, 10, 316, 1, 0, 0, 0, 10, 318, 1, 0, 0, 0, 10, 320, 1, 0, 0, 0, 10, 322, 1, 0, 0, 0, 11, 324, 1, 0, 0, 0, 11, 326, 1, 0, 0, 0, 11, 328, 1, 0, 0, 0, 11, 330, 1, 0, 0, 0, 11, 332, 1, 0, 0, 0, 11, 334, 1, 0, 0, 0, 12, 336, 1, 0, 0, 0, 12, 338, 1, 0, 0, 0, 12, 340, 1, 0, 0, 0, 12, 342, 1, 0, 0, 0, 12, 344, 1, 0, 0, 0, 13, 346, 1, 0, 0, 0, 13, 348, 1, 0, 0, 0, 13, 350, 1, 0, 0, 0, 13, 352, 1, 0, 0, 0, 13, 354, 1, 0, 0, 0, 13, 356, 1, 0, 0, 0, 13, 358, 1, 0, 0, 0, 13, 360, 1, 0, 0, 0, 14, 362, 1, 0, 0, 0, 16, 372, 1, 0, 0, 0, 18, 379, 1, 0, 0, 0, 20, 388, 1, 0, 0, 0, 22, 395, 1, 0, 0, 0, 24, 405, 1, 0, 0, 0, 26, 412, 1, 0, 0, 0, 28, 419, 1, 0, 0, 0, 30, 433, 1, 0, 0, 0, 32, 440, 1, 0, 0, 0, 34, 448, 1, 0, 0, 0, 36, 455, 1, 0, 0, 0, 38, 465, 1, 0, 0, 0, 40, 477, 1, 0, 0, 0, 42, 486, 1, 0, 0, 0, 44, 492, 1, 0, 0, 0, 46, 499, 1, 0, 0, 0, 48, 506, 1, 0, 0, 0, 50, 514, 1, 0, 0, 0, 52, 523, 1, 0, 0, 0, 54, 529, 1, 0, 0, 0, 56, 546, 1, 0, 0, 0, 58, 562, 1, 0, 0, 0, 60, 
571, 1, 0, 0, 0, 62, 574, 1, 0, 0, 0, 64, 578, 1, 0, 0, 0, 66, 583, 1, 0, 0, 0, 68, 588, 1, 0, 0, 0, 70, 592, 1, 0, 0, 0, 72, 596, 1, 0, 0, 0, 74, 600, 1, 0, 0, 0, 76, 604, 1, 0, 0, 0, 78, 606, 1, 0, 0, 0, 80, 608, 1, 0, 0, 0, 82, 611, 1, 0, 0, 0, 84, 613, 1, 0, 0, 0, 86, 622, 1, 0, 0, 0, 88, 624, 1, 0, 0, 0, 90, 629, 1, 0, 0, 0, 92, 631, 1, 0, 0, 0, 94, 636, 1, 0, 0, 0, 96, 667, 1, 0, 0, 0, 98, 670, 1, 0, 0, 0, 100, 716, 1, 0, 0, 0, 102, 718, 1, 0, 0, 0, 104, 721, 1, 0, 0, 0, 106, 725, 1, 0, 0, 0, 108, 729, 1, 0, 0, 0, 110, 731, 1, 0, 0, 0, 112, 734, 1, 0, 0, 0, 114, 736, 1, 0, 0, 0, 116, 741, 1, 0, 0, 0, 118, 743, 1, 0, 0, 0, 120, 749, 1, 0, 0, 0, 122, 755, 1, 0, 0, 0, 124, 760, 1, 0, 0, 0, 126, 762, 1, 0, 0, 0, 128, 765, 1, 0, 0, 0, 130, 768, 1, 0, 0, 0, 132, 773, 1, 0, 0, 0, 134, 777, 1, 0, 0, 0, 136, 782, 1, 0, 0, 0, 138, 788, 1, 0, 0, 0, 140, 791, 1, 0, 0, 0, 142, 793, 1, 0, 0, 0, 144, 799, 1, 0, 0, 0, 146, 801, 1, 0, 0, 0, 148, 806, 1, 0, 0, 0, 150, 809, 1, 0, 0, 0, 152, 812, 1, 0, 0, 0, 154, 815, 1, 0, 0, 0, 156, 817, 1, 0, 0, 0, 158, 820, 1, 0, 0, 0, 160, 822, 1, 0, 0, 0, 162, 825, 1, 0, 0, 0, 164, 827, 1, 0, 0, 0, 166, 829, 1, 0, 0, 0, 168, 831, 1, 0, 0, 0, 170, 833, 1, 0, 0, 0, 172, 835, 1, 0, 0, 0, 174, 840, 1, 0, 0, 0, 176, 861, 1, 0, 0, 0, 178, 863, 1, 0, 0, 0, 180, 871, 1, 0, 0, 0, 182, 873, 1, 0, 0, 0, 184, 877, 1, 0, 0, 0, 186, 881, 1, 0, 0, 0, 188, 885, 1, 0, 0, 0, 190, 890, 1, 0, 0, 0, 192, 894, 1, 0, 0, 0, 194, 898, 1, 0, 0, 0, 196, 902, 1, 0, 0, 0, 198, 906, 1, 0, 0, 0, 200, 910, 1, 0, 0, 0, 202, 918, 1, 0, 0, 0, 204, 927, 1, 0, 0, 0, 206, 931, 1, 0, 0, 0, 208, 935, 1, 0, 0, 0, 210, 939, 1, 0, 0, 0, 212, 943, 1, 0, 0, 0, 214, 948, 1, 0, 0, 0, 216, 952, 1, 0, 0, 0, 218, 960, 1, 0, 0, 0, 220, 981, 1, 0, 0, 0, 222, 985, 1, 0, 0, 0, 224, 989, 1, 0, 0, 0, 226, 993, 1, 0, 0, 0, 228, 997, 1, 0, 0, 0, 230, 1001, 1, 0, 0, 0, 232, 1006, 1, 0, 0, 0, 234, 1010, 1, 0, 0, 0, 236, 1014, 1, 0, 0, 0, 238, 1018, 1, 0, 0, 0, 240, 1021, 1, 0, 0, 0, 242, 1025, 1, 
0, 0, 0, 244, 1029, 1, 0, 0, 0, 246, 1033, 1, 0, 0, 0, 248, 1037, 1, 0, 0, 0, 250, 1042, 1, 0, 0, 0, 252, 1047, 1, 0, 0, 0, 254, 1052, 1, 0, 0, 0, 256, 1059, 1, 0, 0, 0, 258, 1068, 1, 0, 0, 0, 260, 1075, 1, 0, 0, 0, 262, 1079, 1, 0, 0, 0, 264, 1083, 1, 0, 0, 0, 266, 1087, 1, 0, 0, 0, 268, 1091, 1, 0, 0, 0, 270, 1095, 1, 0, 0, 0, 272, 1101, 1, 0, 0, 0, 274, 1105, 1, 0, 0, 0, 276, 1109, 1, 0, 0, 0, 278, 1113, 1, 0, 0, 0, 280, 1117, 1, 0, 0, 0, 282, 1121, 1, 0, 0, 0, 284, 1125, 1, 0, 0, 0, 286, 1129, 1, 0, 0, 0, 288, 1133, 1, 0, 0, 0, 290, 1137, 1, 0, 0, 0, 292, 1142, 1, 0, 0, 0, 294, 1146, 1, 0, 0, 0, 296, 1150, 1, 0, 0, 0, 298, 1154, 1, 0, 0, 0, 300, 1158, 1, 0, 0, 0, 302, 1162, 1, 0, 0, 0, 304, 1166, 1, 0, 0, 0, 306, 1171, 1, 0, 0, 0, 308, 1176, 1, 0, 0, 0, 310, 1180, 1, 0, 0, 0, 312, 1184, 1, 0, 0, 0, 314, 1188, 1, 0, 0, 0, 316, 1193, 1, 0, 0, 0, 318, 1203, 1, 0, 0, 0, 320, 1207, 1, 0, 0, 0, 322, 1211, 1, 0, 0, 0, 324, 1215, 1, 0, 0, 0, 326, 1220, 1, 0, 0, 0, 328, 1227, 1, 0, 0, 0, 330, 1231, 1, 0, 0, 0, 332, 1235, 1, 0, 0, 0, 334, 1239, 1, 0, 0, 0, 336, 1243, 1, 0, 0, 0, 338, 1248, 1, 0, 0, 0, 340, 1254, 1, 0, 0, 0, 342, 1258, 1, 0, 0, 0, 344, 1262, 1, 0, 0, 0, 346, 1266, 1, 0, 0, 0, 348, 1272, 1, 0, 0, 0, 350, 1276, 1, 0, 0, 0, 352, 1280, 1, 0, 0, 0, 354, 1284, 1, 0, 0, 0, 356, 1290, 1, 0, 0, 0, 358, 1296, 1, 0, 0, 0, 360, 1302, 1, 0, 0, 0, 362, 363, 5, 100, 0, 0, 363, 364, 5, 105, 0, 0, 364, 365, 5, 115, 0, 0, 365, 366, 5, 115, 0, 0, 366, 367, 5, 101, 0, 0, 367, 368, 5, 99, 0, 0, 368, 369, 5, 116, 0, 0, 369, 370, 1, 0, 0, 0, 370, 371, 6, 0, 0, 0, 371, 15, 1, 0, 0, 0, 372, 373, 5, 100, 0, 0, 373, 374, 5, 114, 0, 0, 374, 375, 5, 111, 0, 0, 375, 376, 5, 112, 0, 0, 376, 377, 1, 0, 0, 0, 377, 378, 6, 1, 1, 0, 378, 17, 1, 0, 0, 0, 379, 380, 5, 101, 0, 0, 380, 381, 5, 110, 0, 0, 381, 382, 5, 114, 0, 0, 382, 383, 5, 105, 0, 0, 383, 384, 5, 99, 0, 0, 384, 385, 5, 104, 0, 0, 385, 386, 1, 0, 0, 0, 386, 387, 6, 2, 2, 0, 387, 19, 1, 0, 0, 0, 388, 389, 5, 101, 0, 0, 389, 
390, 5, 118, 0, 0, 390, 391, 5, 97, 0, 0, 391, 392, 5, 108, 0, 0, 392, 393, 1, 0, 0, 0, 393, 394, 6, 3, 0, 0, 394, 21, 1, 0, 0, 0, 395, 396, 5, 101, 0, 0, 396, 397, 5, 120, 0, 0, 397, 398, 5, 112, 0, 0, 398, 399, 5, 108, 0, 0, 399, 400, 5, 97, 0, 0, 400, 401, 5, 105, 0, 0, 401, 402, 5, 110, 0, 0, 402, 403, 1, 0, 0, 0, 403, 404, 6, 4, 3, 0, 404, 23, 1, 0, 0, 0, 405, 406, 5, 102, 0, 0, 406, 407, 5, 114, 0, 0, 407, 408, 5, 111, 0, 0, 408, 409, 5, 109, 0, 0, 409, 410, 1, 0, 0, 0, 410, 411, 6, 5, 4, 0, 411, 25, 1, 0, 0, 0, 412, 413, 5, 103, 0, 0, 413, 414, 5, 114, 0, 0, 414, 415, 5, 111, 0, 0, 415, 416, 5, 107, 0, 0, 416, 417, 1, 0, 0, 0, 417, 418, 6, 6, 0, 0, 418, 27, 1, 0, 0, 0, 419, 420, 5, 105, 0, 0, 420, 421, 5, 110, 0, 0, 421, 422, 5, 108, 0, 0, 422, 423, 5, 105, 0, 0, 423, 424, 5, 110, 0, 0, 424, 425, 5, 101, 0, 0, 425, 426, 5, 115, 0, 0, 426, 427, 5, 116, 0, 0, 427, 428, 5, 97, 0, 0, 428, 429, 5, 116, 0, 0, 429, 430, 5, 115, 0, 0, 430, 431, 1, 0, 0, 0, 431, 432, 6, 7, 0, 0, 432, 29, 1, 0, 0, 0, 433, 434, 5, 107, 0, 0, 434, 435, 5, 101, 0, 0, 435, 436, 5, 101, 0, 0, 436, 437, 5, 112, 0, 0, 437, 438, 1, 0, 0, 0, 438, 439, 6, 8, 1, 0, 439, 31, 1, 0, 0, 0, 440, 441, 5, 108, 0, 0, 441, 442, 5, 105, 0, 0, 442, 443, 5, 109, 0, 0, 443, 444, 5, 105, 0, 0, 444, 445, 5, 116, 0, 0, 445, 446, 1, 0, 0, 0, 446, 447, 6, 9, 0, 0, 447, 33, 1, 0, 0, 0, 448, 449, 5, 109, 0, 0, 449, 450, 5, 101, 0, 0, 450, 451, 5, 116, 0, 0, 451, 452, 5, 97, 0, 0, 452, 453, 1, 0, 0, 0, 453, 454, 6, 10, 5, 0, 454, 35, 1, 0, 0, 0, 455, 456, 5, 109, 0, 0, 456, 457, 5, 101, 0, 0, 457, 458, 5, 116, 0, 0, 458, 459, 5, 114, 0, 0, 459, 460, 5, 105, 0, 0, 460, 461, 5, 99, 0, 0, 461, 462, 5, 115, 0, 0, 462, 463, 1, 0, 0, 0, 463, 464, 6, 11, 6, 0, 464, 37, 1, 0, 0, 0, 465, 466, 5, 109, 0, 0, 466, 467, 5, 118, 0, 0, 467, 468, 5, 95, 0, 0, 468, 469, 5, 101, 0, 0, 469, 470, 5, 120, 0, 0, 470, 471, 5, 112, 0, 0, 471, 472, 5, 97, 0, 0, 472, 473, 5, 110, 0, 0, 473, 474, 5, 100, 0, 0, 474, 475, 1, 0, 0, 0, 475, 476, 
6, 12, 7, 0, 476, 39, 1, 0, 0, 0, 477, 478, 5, 114, 0, 0, 478, 479, 5, 101, 0, 0, 479, 480, 5, 110, 0, 0, 480, 481, 5, 97, 0, 0, 481, 482, 5, 109, 0, 0, 482, 483, 5, 101, 0, 0, 483, 484, 1, 0, 0, 0, 484, 485, 6, 13, 8, 0, 485, 41, 1, 0, 0, 0, 486, 487, 5, 114, 0, 0, 487, 488, 5, 111, 0, 0, 488, 489, 5, 119, 0, 0, 489, 490, 1, 0, 0, 0, 490, 491, 6, 14, 0, 0, 491, 43, 1, 0, 0, 0, 492, 493, 5, 115, 0, 0, 493, 494, 5, 104, 0, 0, 494, 495, 5, 111, 0, 0, 495, 496, 5, 119, 0, 0, 496, 497, 1, 0, 0, 0, 497, 498, 6, 15, 9, 0, 498, 45, 1, 0, 0, 0, 499, 500, 5, 115, 0, 0, 500, 501, 5, 111, 0, 0, 501, 502, 5, 114, 0, 0, 502, 503, 5, 116, 0, 0, 503, 504, 1, 0, 0, 0, 504, 505, 6, 16, 0, 0, 505, 47, 1, 0, 0, 0, 506, 507, 5, 115, 0, 0, 507, 508, 5, 116, 0, 0, 508, 509, 5, 97, 0, 0, 509, 510, 5, 116, 0, 0, 510, 511, 5, 115, 0, 0, 511, 512, 1, 0, 0, 0, 512, 513, 6, 17, 0, 0, 513, 49, 1, 0, 0, 0, 514, 515, 5, 119, 0, 0, 515, 516, 5, 104, 0, 0, 516, 517, 5, 101, 0, 0, 517, 518, 5, 114, 0, 0, 518, 519, 5, 101, 0, 0, 519, 520, 1, 0, 0, 0, 520, 521, 6, 18, 0, 0, 521, 51, 1, 0, 0, 0, 522, 524, 8, 0, 0, 0, 523, 522, 1, 0, 0, 0, 524, 525, 1, 0, 0, 0, 525, 523, 1, 0, 0, 0, 525, 526, 1, 0, 0, 0, 526, 527, 1, 0, 0, 0, 527, 528, 6, 19, 0, 0, 528, 53, 1, 0, 0, 0, 529, 530, 5, 47, 0, 0, 530, 531, 5, 47, 0, 0, 531, 535, 1, 0, 0, 0, 532, 534, 8, 1, 0, 0, 533, 532, 1, 0, 0, 0, 534, 537, 1, 0, 0, 0, 535, 533, 1, 0, 0, 0, 535, 536, 1, 0, 0, 0, 536, 539, 1, 0, 0, 0, 537, 535, 1, 0, 0, 0, 538, 540, 5, 13, 0, 0, 539, 538, 1, 0, 0, 0, 539, 540, 1, 0, 0, 0, 540, 542, 1, 0, 0, 0, 541, 543, 5, 10, 0, 0, 542, 541, 1, 0, 0, 0, 542, 543, 1, 0, 0, 0, 543, 544, 1, 0, 0, 0, 544, 545, 6, 20, 10, 0, 545, 55, 1, 0, 0, 0, 546, 547, 5, 47, 0, 0, 547, 548, 5, 42, 0, 0, 548, 553, 1, 0, 0, 0, 549, 552, 3, 56, 21, 0, 550, 552, 9, 0, 0, 0, 551, 549, 1, 0, 0, 0, 551, 550, 1, 0, 0, 0, 552, 555, 1, 0, 0, 0, 553, 554, 1, 0, 0, 0, 553, 551, 1, 0, 0, 0, 554, 556, 1, 0, 0, 0, 555, 553, 1, 0, 0, 0, 556, 557, 5, 42, 0, 0, 557, 558, 
5, 47, 0, 0, 558, 559, 1, 0, 0, 0, 559, 560, 6, 21, 10, 0, 560, 57, 1, 0, 0, 0, 561, 563, 7, 2, 0, 0, 562, 561, 1, 0, 0, 0, 563, 564, 1, 0, 0, 0, 564, 562, 1, 0, 0, 0, 564, 565, 1, 0, 0, 0, 565, 566, 1, 0, 0, 0, 566, 567, 6, 22, 10, 0, 567, 59, 1, 0, 0, 0, 568, 572, 8, 3, 0, 0, 569, 570, 5, 47, 0, 0, 570, 572, 8, 4, 0, 0, 571, 568, 1, 0, 0, 0, 571, 569, 1, 0, 0, 0, 572, 61, 1, 0, 0, 0, 573, 575, 3, 60, 23, 0, 574, 573, 1, 0, 0, 0, 575, 576, 1, 0, 0, 0, 576, 574, 1, 0, 0, 0, 576, 577, 1, 0, 0, 0, 577, 63, 1, 0, 0, 0, 578, 579, 3, 172, 79, 0, 579, 580, 1, 0, 0, 0, 580, 581, 6, 25, 11, 0, 581, 582, 6, 25, 12, 0, 582, 65, 1, 0, 0, 0, 583, 584, 3, 74, 30, 0, 584, 585, 1, 0, 0, 0, 585, 586, 6, 26, 13, 0, 586, 587, 6, 26, 14, 0, 587, 67, 1, 0, 0, 0, 588, 589, 3, 58, 22, 0, 589, 590, 1, 0, 0, 0, 590, 591, 6, 27, 10, 0, 591, 69, 1, 0, 0, 0, 592, 593, 3, 54, 20, 0, 593, 594, 1, 0, 0, 0, 594, 595, 6, 28, 10, 0, 595, 71, 1, 0, 0, 0, 596, 597, 3, 56, 21, 0, 597, 598, 1, 0, 0, 0, 598, 599, 6, 29, 10, 0, 599, 73, 1, 0, 0, 0, 600, 601, 5, 124, 0, 0, 601, 602, 1, 0, 0, 0, 602, 603, 6, 30, 14, 0, 603, 75, 1, 0, 0, 0, 604, 605, 7, 5, 0, 0, 605, 77, 1, 0, 0, 0, 606, 607, 7, 6, 0, 0, 607, 79, 1, 0, 0, 0, 608, 609, 5, 92, 0, 0, 609, 610, 7, 7, 0, 0, 610, 81, 1, 0, 0, 0, 611, 612, 8, 8, 0, 0, 612, 83, 1, 0, 0, 0, 613, 615, 7, 9, 0, 0, 614, 616, 7, 10, 0, 0, 615, 614, 1, 0, 0, 0, 615, 616, 1, 0, 0, 0, 616, 618, 1, 0, 0, 0, 617, 619, 3, 76, 31, 0, 618, 617, 1, 0, 0, 0, 619, 620, 1, 0, 0, 0, 620, 618, 1, 0, 0, 0, 620, 621, 1, 0, 0, 0, 621, 85, 1, 0, 0, 0, 622, 623, 5, 64, 0, 0, 623, 87, 1, 0, 0, 0, 624, 625, 5, 96, 0, 0, 625, 89, 1, 0, 0, 0, 626, 630, 8, 11, 0, 0, 627, 628, 5, 96, 0, 0, 628, 630, 5, 96, 0, 0, 629, 626, 1, 0, 0, 0, 629, 627, 1, 0, 0, 0, 630, 91, 1, 0, 0, 0, 631, 632, 5, 95, 0, 0, 632, 93, 1, 0, 0, 0, 633, 637, 3, 78, 32, 0, 634, 637, 3, 76, 31, 0, 635, 637, 3, 92, 39, 0, 636, 633, 1, 0, 0, 0, 636, 634, 1, 0, 0, 0, 636, 635, 1, 0, 0, 0, 637, 95, 1, 0, 0, 0, 638, 643, 5, 34, 
0, 0, 639, 642, 3, 80, 33, 0, 640, 642, 3, 82, 34, 0, 641, 639, 1, 0, 0, 0, 641, 640, 1, 0, 0, 0, 642, 645, 1, 0, 0, 0, 643, 641, 1, 0, 0, 0, 643, 644, 1, 0, 0, 0, 644, 646, 1, 0, 0, 0, 645, 643, 1, 0, 0, 0, 646, 668, 5, 34, 0, 0, 647, 648, 5, 34, 0, 0, 648, 649, 5, 34, 0, 0, 649, 650, 5, 34, 0, 0, 650, 654, 1, 0, 0, 0, 651, 653, 8, 1, 0, 0, 652, 651, 1, 0, 0, 0, 653, 656, 1, 0, 0, 0, 654, 655, 1, 0, 0, 0, 654, 652, 1, 0, 0, 0, 655, 657, 1, 0, 0, 0, 656, 654, 1, 0, 0, 0, 657, 658, 5, 34, 0, 0, 658, 659, 5, 34, 0, 0, 659, 660, 5, 34, 0, 0, 660, 662, 1, 0, 0, 0, 661, 663, 5, 34, 0, 0, 662, 661, 1, 0, 0, 0, 662, 663, 1, 0, 0, 0, 663, 665, 1, 0, 0, 0, 664, 666, 5, 34, 0, 0, 665, 664, 1, 0, 0, 0, 665, 666, 1, 0, 0, 0, 666, 668, 1, 0, 0, 0, 667, 638, 1, 0, 0, 0, 667, 647, 1, 0, 0, 0, 668, 97, 1, 0, 0, 0, 669, 671, 3, 76, 31, 0, 670, 669, 1, 0, 0, 0, 671, 672, 1, 0, 0, 0, 672, 670, 1, 0, 0, 0, 672, 673, 1, 0, 0, 0, 673, 99, 1, 0, 0, 0, 674, 676, 3, 76, 31, 0, 675, 674, 1, 0, 0, 0, 676, 677, 1, 0, 0, 0, 677, 675, 1, 0, 0, 0, 677, 678, 1, 0, 0, 0, 678, 679, 1, 0, 0, 0, 679, 683, 3, 116, 51, 0, 680, 682, 3, 76, 31, 0, 681, 680, 1, 0, 0, 0, 682, 685, 1, 0, 0, 0, 683, 681, 1, 0, 0, 0, 683, 684, 1, 0, 0, 0, 684, 717, 1, 0, 0, 0, 685, 683, 1, 0, 0, 0, 686, 688, 3, 116, 51, 0, 687, 689, 3, 76, 31, 0, 688, 687, 1, 0, 0, 0, 689, 690, 1, 0, 0, 0, 690, 688, 1, 0, 0, 0, 690, 691, 1, 0, 0, 0, 691, 717, 1, 0, 0, 0, 692, 694, 3, 76, 31, 0, 693, 692, 1, 0, 0, 0, 694, 695, 1, 0, 0, 0, 695, 693, 1, 0, 0, 0, 695, 696, 1, 0, 0, 0, 696, 704, 1, 0, 0, 0, 697, 701, 3, 116, 51, 0, 698, 700, 3, 76, 31, 0, 699, 698, 1, 0, 0, 0, 700, 703, 1, 0, 0, 0, 701, 699, 1, 0, 0, 0, 701, 702, 1, 0, 0, 0, 702, 705, 1, 0, 0, 0, 703, 701, 1, 0, 0, 0, 704, 697, 1, 0, 0, 0, 704, 705, 1, 0, 0, 0, 705, 706, 1, 0, 0, 0, 706, 707, 3, 84, 35, 0, 707, 717, 1, 0, 0, 0, 708, 710, 3, 116, 51, 0, 709, 711, 3, 76, 31, 0, 710, 709, 1, 0, 0, 0, 711, 712, 1, 0, 0, 0, 712, 710, 1, 0, 0, 0, 712, 713, 1, 0, 0, 0, 713, 714, 1, 0, 0, 
0, 714, 715, 3, 84, 35, 0, 715, 717, 1, 0, 0, 0, 716, 675, 1, 0, 0, 0, 716, 686, 1, 0, 0, 0, 716, 693, 1, 0, 0, 0, 716, 708, 1, 0, 0, 0, 717, 101, 1, 0, 0, 0, 718, 719, 5, 98, 0, 0, 719, 720, 5, 121, 0, 0, 720, 103, 1, 0, 0, 0, 721, 722, 5, 97, 0, 0, 722, 723, 5, 110, 0, 0, 723, 724, 5, 100, 0, 0, 724, 105, 1, 0, 0, 0, 725, 726, 5, 97, 0, 0, 726, 727, 5, 115, 0, 0, 727, 728, 5, 99, 0, 0, 728, 107, 1, 0, 0, 0, 729, 730, 5, 61, 0, 0, 730, 109, 1, 0, 0, 0, 731, 732, 5, 58, 0, 0, 732, 733, 5, 58, 0, 0, 733, 111, 1, 0, 0, 0, 734, 735, 5, 44, 0, 0, 735, 113, 1, 0, 0, 0, 736, 737, 5, 100, 0, 0, 737, 738, 5, 101, 0, 0, 738, 739, 5, 115, 0, 0, 739, 740, 5, 99, 0, 0, 740, 115, 1, 0, 0, 0, 741, 742, 5, 46, 0, 0, 742, 117, 1, 0, 0, 0, 743, 744, 5, 102, 0, 0, 744, 745, 5, 97, 0, 0, 745, 746, 5, 108, 0, 0, 746, 747, 5, 115, 0, 0, 747, 748, 5, 101, 0, 0, 748, 119, 1, 0, 0, 0, 749, 750, 5, 102, 0, 0, 750, 751, 5, 105, 0, 0, 751, 752, 5, 114, 0, 0, 752, 753, 5, 115, 0, 0, 753, 754, 5, 116, 0, 0, 754, 121, 1, 0, 0, 0, 755, 756, 5, 108, 0, 0, 756, 757, 5, 97, 0, 0, 757, 758, 5, 115, 0, 0, 758, 759, 5, 116, 0, 0, 759, 123, 1, 0, 0, 0, 760, 761, 5, 40, 0, 0, 761, 125, 1, 0, 0, 0, 762, 763, 5, 105, 0, 0, 763, 764, 5, 110, 0, 0, 764, 127, 1, 0, 0, 0, 765, 766, 5, 105, 0, 0, 766, 767, 5, 115, 0, 0, 767, 129, 1, 0, 0, 0, 768, 769, 5, 108, 0, 0, 769, 770, 5, 105, 0, 0, 770, 771, 5, 107, 0, 0, 771, 772, 5, 101, 0, 0, 772, 131, 1, 0, 0, 0, 773, 774, 5, 110, 0, 0, 774, 775, 5, 111, 0, 0, 775, 776, 5, 116, 0, 0, 776, 133, 1, 0, 0, 0, 777, 778, 5, 110, 0, 0, 778, 779, 5, 117, 0, 0, 779, 780, 5, 108, 0, 0, 780, 781, 5, 108, 0, 0, 781, 135, 1, 0, 0, 0, 782, 783, 5, 110, 0, 0, 783, 784, 5, 117, 0, 0, 784, 785, 5, 108, 0, 0, 785, 786, 5, 108, 0, 0, 786, 787, 5, 115, 0, 0, 787, 137, 1, 0, 0, 0, 788, 789, 5, 111, 0, 0, 789, 790, 5, 114, 0, 0, 790, 139, 1, 0, 0, 0, 791, 792, 5, 63, 0, 0, 792, 141, 1, 0, 0, 0, 793, 794, 5, 114, 0, 0, 794, 795, 5, 108, 0, 0, 795, 796, 5, 105, 0, 0, 796, 797, 5, 107, 0, 
0, 797, 798, 5, 101, 0, 0, 798, 143, 1, 0, 0, 0, 799, 800, 5, 41, 0, 0, 800, 145, 1, 0, 0, 0, 801, 802, 5, 116, 0, 0, 802, 803, 5, 114, 0, 0, 803, 804, 5, 117, 0, 0, 804, 805, 5, 101, 0, 0, 805, 147, 1, 0, 0, 0, 806, 807, 5, 61, 0, 0, 807, 808, 5, 61, 0, 0, 808, 149, 1, 0, 0, 0, 809, 810, 5, 61, 0, 0, 810, 811, 5, 126, 0, 0, 811, 151, 1, 0, 0, 0, 812, 813, 5, 33, 0, 0, 813, 814, 5, 61, 0, 0, 814, 153, 1, 0, 0, 0, 815, 816, 5, 60, 0, 0, 816, 155, 1, 0, 0, 0, 817, 818, 5, 60, 0, 0, 818, 819, 5, 61, 0, 0, 819, 157, 1, 0, 0, 0, 820, 821, 5, 62, 0, 0, 821, 159, 1, 0, 0, 0, 822, 823, 5, 62, 0, 0, 823, 824, 5, 61, 0, 0, 824, 161, 1, 0, 0, 0, 825, 826, 5, 43, 0, 0, 826, 163, 1, 0, 0, 0, 827, 828, 5, 45, 0, 0, 828, 165, 1, 0, 0, 0, 829, 830, 5, 42, 0, 0, 830, 167, 1, 0, 0, 0, 831, 832, 5, 47, 0, 0, 832, 169, 1, 0, 0, 0, 833, 834, 5, 37, 0, 0, 834, 171, 1, 0, 0, 0, 835, 836, 5, 91, 0, 0, 836, 837, 1, 0, 0, 0, 837, 838, 6, 79, 0, 0, 838, 839, 6, 79, 0, 0, 839, 173, 1, 0, 0, 0, 840, 841, 5, 93, 0, 0, 841, 842, 1, 0, 0, 0, 842, 843, 6, 80, 14, 0, 843, 844, 6, 80, 14, 0, 844, 175, 1, 0, 0, 0, 845, 849, 3, 78, 32, 0, 846, 848, 3, 94, 40, 0, 847, 846, 1, 0, 0, 0, 848, 851, 1, 0, 0, 0, 849, 847, 1, 0, 0, 0, 849, 850, 1, 0, 0, 0, 850, 862, 1, 0, 0, 0, 851, 849, 1, 0, 0, 0, 852, 855, 3, 92, 39, 0, 853, 855, 3, 86, 36, 0, 854, 852, 1, 0, 0, 0, 854, 853, 1, 0, 0, 0, 855, 857, 1, 0, 0, 0, 856, 858, 3, 94, 40, 0, 857, 856, 1, 0, 0, 0, 858, 859, 1, 0, 0, 0, 859, 857, 1, 0, 0, 0, 859, 860, 1, 0, 0, 0, 860, 862, 1, 0, 0, 0, 861, 845, 1, 0, 0, 0, 861, 854, 1, 0, 0, 0, 862, 177, 1, 0, 0, 0, 863, 865, 3, 88, 37, 0, 864, 866, 3, 90, 38, 0, 865, 864, 1, 0, 0, 0, 866, 867, 1, 0, 0, 0, 867, 865, 1, 0, 0, 0, 867, 868, 1, 0, 0, 0, 868, 869, 1, 0, 0, 0, 869, 870, 3, 88, 37, 0, 870, 179, 1, 0, 0, 0, 871, 872, 3, 178, 82, 0, 872, 181, 1, 0, 0, 0, 873, 874, 3, 54, 20, 0, 874, 875, 1, 0, 0, 0, 875, 876, 6, 84, 10, 0, 876, 183, 1, 0, 0, 0, 877, 878, 3, 56, 21, 0, 878, 879, 1, 0, 0, 0, 879, 880, 6, 85, 10, 
0, 880, 185, 1, 0, 0, 0, 881, 882, 3, 58, 22, 0, 882, 883, 1, 0, 0, 0, 883, 884, 6, 86, 10, 0, 884, 187, 1, 0, 0, 0, 885, 886, 3, 74, 30, 0, 886, 887, 1, 0, 0, 0, 887, 888, 6, 87, 13, 0, 888, 889, 6, 87, 14, 0, 889, 189, 1, 0, 0, 0, 890, 891, 3, 172, 79, 0, 891, 892, 1, 0, 0, 0, 892, 893, 6, 88, 11, 0, 893, 191, 1, 0, 0, 0, 894, 895, 3, 174, 80, 0, 895, 896, 1, 0, 0, 0, 896, 897, 6, 89, 15, 0, 897, 193, 1, 0, 0, 0, 898, 899, 3, 112, 49, 0, 899, 900, 1, 0, 0, 0, 900, 901, 6, 90, 16, 0, 901, 195, 1, 0, 0, 0, 902, 903, 3, 108, 47, 0, 903, 904, 1, 0, 0, 0, 904, 905, 6, 91, 17, 0, 905, 197, 1, 0, 0, 0, 906, 907, 3, 96, 41, 0, 907, 908, 1, 0, 0, 0, 908, 909, 6, 92, 18, 0, 909, 199, 1, 0, 0, 0, 910, 911, 5, 111, 0, 0, 911, 912, 5, 112, 0, 0, 912, 913, 5, 116, 0, 0, 913, 914, 5, 105, 0, 0, 914, 915, 5, 111, 0, 0, 915, 916, 5, 110, 0, 0, 916, 917, 5, 115, 0, 0, 917, 201, 1, 0, 0, 0, 918, 919, 5, 109, 0, 0, 919, 920, 5, 101, 0, 0, 920, 921, 5, 116, 0, 0, 921, 922, 5, 97, 0, 0, 922, 923, 5, 100, 0, 0, 923, 924, 5, 97, 0, 0, 924, 925, 5, 116, 0, 0, 925, 926, 5, 97, 0, 0, 926, 203, 1, 0, 0, 0, 927, 928, 3, 62, 24, 0, 928, 929, 1, 0, 0, 0, 929, 930, 6, 95, 19, 0, 930, 205, 1, 0, 0, 0, 931, 932, 3, 54, 20, 0, 932, 933, 1, 0, 0, 0, 933, 934, 6, 96, 10, 0, 934, 207, 1, 0, 0, 0, 935, 936, 3, 56, 21, 0, 936, 937, 1, 0, 0, 0, 937, 938, 6, 97, 10, 0, 938, 209, 1, 0, 0, 0, 939, 940, 3, 58, 22, 0, 940, 941, 1, 0, 0, 0, 941, 942, 6, 98, 10, 0, 942, 211, 1, 0, 0, 0, 943, 944, 3, 74, 30, 0, 944, 945, 1, 0, 0, 0, 945, 946, 6, 99, 13, 0, 946, 947, 6, 99, 14, 0, 947, 213, 1, 0, 0, 0, 948, 949, 3, 116, 51, 0, 949, 950, 1, 0, 0, 0, 950, 951, 6, 100, 20, 0, 951, 215, 1, 0, 0, 0, 952, 953, 3, 112, 49, 0, 953, 954, 1, 0, 0, 0, 954, 955, 6, 101, 16, 0, 955, 217, 1, 0, 0, 0, 956, 961, 3, 78, 32, 0, 957, 961, 3, 76, 31, 0, 958, 961, 3, 92, 39, 0, 959, 961, 3, 166, 76, 0, 960, 956, 1, 0, 0, 0, 960, 957, 1, 0, 0, 0, 960, 958, 1, 0, 0, 0, 960, 959, 1, 0, 0, 0, 961, 219, 1, 0, 0, 0, 962, 965, 3, 78, 32, 
0, 963, 965, 3, 166, 76, 0, 964, 962, 1, 0, 0, 0, 964, 963, 1, 0, 0, 0, 965, 969, 1, 0, 0, 0, 966, 968, 3, 218, 102, 0, 967, 966, 1, 0, 0, 0, 968, 971, 1, 0, 0, 0, 969, 967, 1, 0, 0, 0, 969, 970, 1, 0, 0, 0, 970, 982, 1, 0, 0, 0, 971, 969, 1, 0, 0, 0, 972, 975, 3, 92, 39, 0, 973, 975, 3, 86, 36, 0, 974, 972, 1, 0, 0, 0, 974, 973, 1, 0, 0, 0, 975, 977, 1, 0, 0, 0, 976, 978, 3, 218, 102, 0, 977, 976, 1, 0, 0, 0, 978, 979, 1, 0, 0, 0, 979, 977, 1, 0, 0, 0, 979, 980, 1, 0, 0, 0, 980, 982, 1, 0, 0, 0, 981, 964, 1, 0, 0, 0, 981, 974, 1, 0, 0, 0, 982, 221, 1, 0, 0, 0, 983, 986, 3, 220, 103, 0, 984, 986, 3, 178, 82, 0, 985, 983, 1, 0, 0, 0, 985, 984, 1, 0, 0, 0, 986, 987, 1, 0, 0, 0, 987, 985, 1, 0, 0, 0, 987, 988, 1, 0, 0, 0, 988, 223, 1, 0, 0, 0, 989, 990, 3, 54, 20, 0, 990, 991, 1, 0, 0, 0, 991, 992, 6, 105, 10, 0, 992, 225, 1, 0, 0, 0, 993, 994, 3, 56, 21, 0, 994, 995, 1, 0, 0, 0, 995, 996, 6, 106, 10, 0, 996, 227, 1, 0, 0, 0, 997, 998, 3, 58, 22, 0, 998, 999, 1, 0, 0, 0, 999, 1000, 6, 107, 10, 0, 1000, 229, 1, 0, 0, 0, 1001, 1002, 3, 74, 30, 0, 1002, 1003, 1, 0, 0, 0, 1003, 1004, 6, 108, 13, 0, 1004, 1005, 6, 108, 14, 0, 1005, 231, 1, 0, 0, 0, 1006, 1007, 3, 108, 47, 0, 1007, 1008, 1, 0, 0, 0, 1008, 1009, 6, 109, 17, 0, 1009, 233, 1, 0, 0, 0, 1010, 1011, 3, 112, 49, 0, 1011, 1012, 1, 0, 0, 0, 1012, 1013, 6, 110, 16, 0, 1013, 235, 1, 0, 0, 0, 1014, 1015, 3, 116, 51, 0, 1015, 1016, 1, 0, 0, 0, 1016, 1017, 6, 111, 20, 0, 1017, 237, 1, 0, 0, 0, 1018, 1019, 5, 97, 0, 0, 1019, 1020, 5, 115, 0, 0, 1020, 239, 1, 0, 0, 0, 1021, 1022, 3, 222, 104, 0, 1022, 1023, 1, 0, 0, 0, 1023, 1024, 6, 113, 21, 0, 1024, 241, 1, 0, 0, 0, 1025, 1026, 3, 54, 20, 0, 1026, 1027, 1, 0, 0, 0, 1027, 1028, 6, 114, 10, 0, 1028, 243, 1, 0, 0, 0, 1029, 1030, 3, 56, 21, 0, 1030, 1031, 1, 0, 0, 0, 1031, 1032, 6, 115, 10, 0, 1032, 245, 1, 0, 0, 0, 1033, 1034, 3, 58, 22, 0, 1034, 1035, 1, 0, 0, 0, 1035, 1036, 6, 116, 10, 0, 1036, 247, 1, 0, 0, 0, 1037, 1038, 3, 74, 30, 0, 1038, 1039, 1, 0, 0, 0, 1039, 1040, 
6, 117, 13, 0, 1040, 1041, 6, 117, 14, 0, 1041, 249, 1, 0, 0, 0, 1042, 1043, 3, 172, 79, 0, 1043, 1044, 1, 0, 0, 0, 1044, 1045, 6, 118, 11, 0, 1045, 1046, 6, 118, 22, 0, 1046, 251, 1, 0, 0, 0, 1047, 1048, 5, 111, 0, 0, 1048, 1049, 5, 110, 0, 0, 1049, 1050, 1, 0, 0, 0, 1050, 1051, 6, 119, 23, 0, 1051, 253, 1, 0, 0, 0, 1052, 1053, 5, 119, 0, 0, 1053, 1054, 5, 105, 0, 0, 1054, 1055, 5, 116, 0, 0, 1055, 1056, 5, 104, 0, 0, 1056, 1057, 1, 0, 0, 0, 1057, 1058, 6, 120, 23, 0, 1058, 255, 1, 0, 0, 0, 1059, 1060, 8, 12, 0, 0, 1060, 257, 1, 0, 0, 0, 1061, 1063, 3, 256, 121, 0, 1062, 1061, 1, 0, 0, 0, 1063, 1064, 1, 0, 0, 0, 1064, 1062, 1, 0, 0, 0, 1064, 1065, 1, 0, 0, 0, 1065, 1066, 1, 0, 0, 0, 1066, 1067, 3, 326, 156, 0, 1067, 1069, 1, 0, 0, 0, 1068, 1062, 1, 0, 0, 0, 1068, 1069, 1, 0, 0, 0, 1069, 1071, 1, 0, 0, 0, 1070, 1072, 3, 256, 121, 0, 1071, 1070, 1, 0, 0, 0, 1072, 1073, 1, 0, 0, 0, 1073, 1071, 1, 0, 0, 0, 1073, 1074, 1, 0, 0, 0, 1074, 259, 1, 0, 0, 0, 1075, 1076, 3, 180, 83, 0, 1076, 1077, 1, 0, 0, 0, 1077, 1078, 6, 123, 24, 0, 1078, 261, 1, 0, 0, 0, 1079, 1080, 3, 258, 122, 0, 1080, 1081, 1, 0, 0, 0, 1081, 1082, 6, 124, 25, 0, 1082, 263, 1, 0, 0, 0, 1083, 1084, 3, 54, 20, 0, 1084, 1085, 1, 0, 0, 0, 1085, 1086, 6, 125, 10, 0, 1086, 265, 1, 0, 0, 0, 1087, 1088, 3, 56, 21, 0, 1088, 1089, 1, 0, 0, 0, 1089, 1090, 6, 126, 10, 0, 1090, 267, 1, 0, 0, 0, 1091, 1092, 3, 58, 22, 0, 1092, 1093, 1, 0, 0, 0, 1093, 1094, 6, 127, 10, 0, 1094, 269, 1, 0, 0, 0, 1095, 1096, 3, 74, 30, 0, 1096, 1097, 1, 0, 0, 0, 1097, 1098, 6, 128, 13, 0, 1098, 1099, 6, 128, 14, 0, 1099, 1100, 6, 128, 14, 0, 1100, 271, 1, 0, 0, 0, 1101, 1102, 3, 108, 47, 0, 1102, 1103, 1, 0, 0, 0, 1103, 1104, 6, 129, 17, 0, 1104, 273, 1, 0, 0, 0, 1105, 1106, 3, 112, 49, 0, 1106, 1107, 1, 0, 0, 0, 1107, 1108, 6, 130, 16, 0, 1108, 275, 1, 0, 0, 0, 1109, 1110, 3, 116, 51, 0, 1110, 1111, 1, 0, 0, 0, 1111, 1112, 6, 131, 20, 0, 1112, 277, 1, 0, 0, 0, 1113, 1114, 3, 254, 120, 0, 1114, 1115, 1, 0, 0, 0, 1115, 1116, 6, 132, 26, 
0, 1116, 279, 1, 0, 0, 0, 1117, 1118, 3, 222, 104, 0, 1118, 1119, 1, 0, 0, 0, 1119, 1120, 6, 133, 21, 0, 1120, 281, 1, 0, 0, 0, 1121, 1122, 3, 180, 83, 0, 1122, 1123, 1, 0, 0, 0, 1123, 1124, 6, 134, 24, 0, 1124, 283, 1, 0, 0, 0, 1125, 1126, 3, 54, 20, 0, 1126, 1127, 1, 0, 0, 0, 1127, 1128, 6, 135, 10, 0, 1128, 285, 1, 0, 0, 0, 1129, 1130, 3, 56, 21, 0, 1130, 1131, 1, 0, 0, 0, 1131, 1132, 6, 136, 10, 0, 1132, 287, 1, 0, 0, 0, 1133, 1134, 3, 58, 22, 0, 1134, 1135, 1, 0, 0, 0, 1135, 1136, 6, 137, 10, 0, 1136, 289, 1, 0, 0, 0, 1137, 1138, 3, 74, 30, 0, 1138, 1139, 1, 0, 0, 0, 1139, 1140, 6, 138, 13, 0, 1140, 1141, 6, 138, 14, 0, 1141, 291, 1, 0, 0, 0, 1142, 1143, 3, 116, 51, 0, 1143, 1144, 1, 0, 0, 0, 1144, 1145, 6, 139, 20, 0, 1145, 293, 1, 0, 0, 0, 1146, 1147, 3, 180, 83, 0, 1147, 1148, 1, 0, 0, 0, 1148, 1149, 6, 140, 24, 0, 1149, 295, 1, 0, 0, 0, 1150, 1151, 3, 176, 81, 0, 1151, 1152, 1, 0, 0, 0, 1152, 1153, 6, 141, 27, 0, 1153, 297, 1, 0, 0, 0, 1154, 1155, 3, 54, 20, 0, 1155, 1156, 1, 0, 0, 0, 1156, 1157, 6, 142, 10, 0, 1157, 299, 1, 0, 0, 0, 1158, 1159, 3, 56, 21, 0, 1159, 1160, 1, 0, 0, 0, 1160, 1161, 6, 143, 10, 0, 1161, 301, 1, 0, 0, 0, 1162, 1163, 3, 58, 22, 0, 1163, 1164, 1, 0, 0, 0, 1164, 1165, 6, 144, 10, 0, 1165, 303, 1, 0, 0, 0, 1166, 1167, 3, 74, 30, 0, 1167, 1168, 1, 0, 0, 0, 1168, 1169, 6, 145, 13, 0, 1169, 1170, 6, 145, 14, 0, 1170, 305, 1, 0, 0, 0, 1171, 1172, 5, 105, 0, 0, 1172, 1173, 5, 110, 0, 0, 1173, 1174, 5, 102, 0, 0, 1174, 1175, 5, 111, 0, 0, 1175, 307, 1, 0, 0, 0, 1176, 1177, 3, 54, 20, 0, 1177, 1178, 1, 0, 0, 0, 1178, 1179, 6, 147, 10, 0, 1179, 309, 1, 0, 0, 0, 1180, 1181, 3, 56, 21, 0, 1181, 1182, 1, 0, 0, 0, 1182, 1183, 6, 148, 10, 0, 1183, 311, 1, 0, 0, 0, 1184, 1185, 3, 58, 22, 0, 1185, 1186, 1, 0, 0, 0, 1186, 1187, 6, 149, 10, 0, 1187, 313, 1, 0, 0, 0, 1188, 1189, 3, 74, 30, 0, 1189, 1190, 1, 0, 0, 0, 1190, 1191, 6, 150, 13, 0, 1191, 1192, 6, 150, 14, 0, 1192, 315, 1, 0, 0, 0, 1193, 1194, 5, 102, 0, 0, 1194, 1195, 5, 117, 0, 0, 1195, 
1196, 5, 110, 0, 0, 1196, 1197, 5, 99, 0, 0, 1197, 1198, 5, 116, 0, 0, 1198, 1199, 5, 105, 0, 0, 1199, 1200, 5, 111, 0, 0, 1200, 1201, 5, 110, 0, 0, 1201, 1202, 5, 115, 0, 0, 1202, 317, 1, 0, 0, 0, 1203, 1204, 3, 54, 20, 0, 1204, 1205, 1, 0, 0, 0, 1205, 1206, 6, 152, 10, 0, 1206, 319, 1, 0, 0, 0, 1207, 1208, 3, 56, 21, 0, 1208, 1209, 1, 0, 0, 0, 1209, 1210, 6, 153, 10, 0, 1210, 321, 1, 0, 0, 0, 1211, 1212, 3, 58, 22, 0, 1212, 1213, 1, 0, 0, 0, 1213, 1214, 6, 154, 10, 0, 1214, 323, 1, 0, 0, 0, 1215, 1216, 3, 174, 80, 0, 1216, 1217, 1, 0, 0, 0, 1217, 1218, 6, 155, 15, 0, 1218, 1219, 6, 155, 14, 0, 1219, 325, 1, 0, 0, 0, 1220, 1221, 5, 58, 0, 0, 1221, 327, 1, 0, 0, 0, 1222, 1228, 3, 86, 36, 0, 1223, 1228, 3, 76, 31, 0, 1224, 1228, 3, 116, 51, 0, 1225, 1228, 3, 78, 32, 0, 1226, 1228, 3, 92, 39, 0, 1227, 1222, 1, 0, 0, 0, 1227, 1223, 1, 0, 0, 0, 1227, 1224, 1, 0, 0, 0, 1227, 1225, 1, 0, 0, 0, 1227, 1226, 1, 0, 0, 0, 1228, 1229, 1, 0, 0, 0, 1229, 1227, 1, 0, 0, 0, 1229, 1230, 1, 0, 0, 0, 1230, 329, 1, 0, 0, 0, 1231, 1232, 3, 54, 20, 0, 1232, 1233, 1, 0, 0, 0, 1233, 1234, 6, 158, 10, 0, 1234, 331, 1, 0, 0, 0, 1235, 1236, 3, 56, 21, 0, 1236, 1237, 1, 0, 0, 0, 1237, 1238, 6, 159, 10, 0, 1238, 333, 1, 0, 0, 0, 1239, 1240, 3, 58, 22, 0, 1240, 1241, 1, 0, 0, 0, 1241, 1242, 6, 160, 10, 0, 1242, 335, 1, 0, 0, 0, 1243, 1244, 3, 74, 30, 0, 1244, 1245, 1, 0, 0, 0, 1245, 1246, 6, 161, 13, 0, 1246, 1247, 6, 161, 14, 0, 1247, 337, 1, 0, 0, 0, 1248, 1249, 3, 62, 24, 0, 1249, 1250, 1, 0, 0, 0, 1250, 1251, 6, 162, 19, 0, 1251, 1252, 6, 162, 14, 0, 1252, 1253, 6, 162, 28, 0, 1253, 339, 1, 0, 0, 0, 1254, 1255, 3, 54, 20, 0, 1255, 1256, 1, 0, 0, 0, 1256, 1257, 6, 163, 10, 0, 1257, 341, 1, 0, 0, 0, 1258, 1259, 3, 56, 21, 0, 1259, 1260, 1, 0, 0, 0, 1260, 1261, 6, 164, 10, 0, 1261, 343, 1, 0, 0, 0, 1262, 1263, 3, 58, 22, 0, 1263, 1264, 1, 0, 0, 0, 1264, 1265, 6, 165, 10, 0, 1265, 345, 1, 0, 0, 0, 1266, 1267, 3, 112, 49, 0, 1267, 1268, 1, 0, 0, 0, 1268, 1269, 6, 166, 16, 0, 1269, 1270, 6, 166, 
14, 0, 1270, 1271, 6, 166, 6, 0, 1271, 347, 1, 0, 0, 0, 1272, 1273, 3, 54, 20, 0, 1273, 1274, 1, 0, 0, 0, 1274, 1275, 6, 167, 10, 0, 1275, 349, 1, 0, 0, 0, 1276, 1277, 3, 56, 21, 0, 1277, 1278, 1, 0, 0, 0, 1278, 1279, 6, 168, 10, 0, 1279, 351, 1, 0, 0, 0, 1280, 1281, 3, 58, 22, 0, 1281, 1282, 1, 0, 0, 0, 1282, 1283, 6, 169, 10, 0, 1283, 353, 1, 0, 0, 0, 1284, 1285, 3, 180, 83, 0, 1285, 1286, 1, 0, 0, 0, 1286, 1287, 6, 170, 14, 0, 1287, 1288, 6, 170, 0, 0, 1288, 1289, 6, 170, 24, 0, 1289, 355, 1, 0, 0, 0, 1290, 1291, 3, 176, 81, 0, 1291, 1292, 1, 0, 0, 0, 1292, 1293, 6, 171, 14, 0, 1293, 1294, 6, 171, 0, 0, 1294, 1295, 6, 171, 27, 0, 1295, 357, 1, 0, 0, 0, 1296, 1297, 3, 102, 44, 0, 1297, 1298, 1, 0, 0, 0, 1298, 1299, 6, 172, 14, 0, 1299, 1300, 6, 172, 0, 0, 1300, 1301, 6, 172, 29, 0, 1301, 359, 1, 0, 0, 0, 1302, 1303, 3, 74, 30, 0, 1303, 1304, 1, 0, 0, 0, 1304, 1305, 6, 173, 13, 0, 1305, 1306, 6, 173, 14, 0, 1306, 361, 1, 0, 0, 0, 60, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 525, 535, 539, 542, 551, 553, 564, 571, 576, 615, 620, 629, 636, 641, 643, 654, 662, 665, 667, 672, 677, 683, 690, 695, 701, 704, 712, 716, 849, 854, 859, 861, 867, 960, 964, 969, 974, 979, 981, 985, 987, 1064, 1068, 1073, 1227, 1229, 30, 5, 2, 0, 5, 4, 0, 5, 6, 0, 5, 1, 0, 5, 3, 0, 5, 10, 0, 5, 12, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 0, 1, 0, 7, 67, 0, 5, 0, 0, 7, 28, 0, 4, 0, 0, 7, 68, 0, 7, 37, 0, 7, 35, 0, 7, 29, 0, 7, 24, 0, 7, 39, 0, 7, 79, 0, 5, 11, 0, 5, 7, 0, 7, 70, 0, 7, 89, 0, 7, 88, 0, 7, 69, 0, 5, 13, 0, 7, 32, 0] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java index ac3354d0aa907..d7a73eeb844d0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java @@ -18,30 +18,32 @@ public 
class EsqlBaseLexer extends Lexer { new PredictionContextCache(); public static final int DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, INLINESTATS=8, - KEEP=9, LIMIT=10, META=11, MV_EXPAND=12, RENAME=13, ROW=14, SHOW=15, SORT=16, - STATS=17, WHERE=18, UNKNOWN_CMD=19, LINE_COMMENT=20, MULTILINE_COMMENT=21, - WS=22, EXPLAIN_WS=23, EXPLAIN_LINE_COMMENT=24, EXPLAIN_MULTILINE_COMMENT=25, - PIPE=26, QUOTED_STRING=27, INTEGER_LITERAL=28, DECIMAL_LITERAL=29, BY=30, - AND=31, ASC=32, ASSIGN=33, CAST_OP=34, COMMA=35, DESC=36, DOT=37, FALSE=38, - FIRST=39, LAST=40, LP=41, IN=42, IS=43, LIKE=44, NOT=45, NULL=46, NULLS=47, - OR=48, PARAM=49, RLIKE=50, RP=51, TRUE=52, EQ=53, CIEQ=54, NEQ=55, LT=56, - LTE=57, GT=58, GTE=59, PLUS=60, MINUS=61, ASTERISK=62, SLASH=63, PERCENT=64, - OPENING_BRACKET=65, CLOSING_BRACKET=66, UNQUOTED_IDENTIFIER=67, QUOTED_IDENTIFIER=68, - EXPR_LINE_COMMENT=69, EXPR_MULTILINE_COMMENT=70, EXPR_WS=71, OPTIONS=72, - METADATA=73, FROM_UNQUOTED_IDENTIFIER=74, FROM_LINE_COMMENT=75, FROM_MULTILINE_COMMENT=76, - FROM_WS=77, ID_PATTERN=78, PROJECT_LINE_COMMENT=79, PROJECT_MULTILINE_COMMENT=80, - PROJECT_WS=81, AS=82, RENAME_LINE_COMMENT=83, RENAME_MULTILINE_COMMENT=84, - RENAME_WS=85, ON=86, WITH=87, ENRICH_POLICY_NAME=88, ENRICH_LINE_COMMENT=89, - ENRICH_MULTILINE_COMMENT=90, ENRICH_WS=91, ENRICH_FIELD_LINE_COMMENT=92, - ENRICH_FIELD_MULTILINE_COMMENT=93, ENRICH_FIELD_WS=94, MVEXPAND_LINE_COMMENT=95, - MVEXPAND_MULTILINE_COMMENT=96, MVEXPAND_WS=97, INFO=98, SHOW_LINE_COMMENT=99, - SHOW_MULTILINE_COMMENT=100, SHOW_WS=101, FUNCTIONS=102, META_LINE_COMMENT=103, - META_MULTILINE_COMMENT=104, META_WS=105, COLON=106, SETTING=107, SETTING_LINE_COMMENT=108, - SETTTING_MULTILINE_COMMENT=109, SETTING_WS=110; + KEEP=9, LIMIT=10, META=11, METRICS=12, MV_EXPAND=13, RENAME=14, ROW=15, + SHOW=16, SORT=17, STATS=18, WHERE=19, UNKNOWN_CMD=20, LINE_COMMENT=21, + MULTILINE_COMMENT=22, WS=23, INDEX_UNQUOTED_IDENTIFIER=24, EXPLAIN_WS=25, + EXPLAIN_LINE_COMMENT=26, 
EXPLAIN_MULTILINE_COMMENT=27, PIPE=28, QUOTED_STRING=29, + INTEGER_LITERAL=30, DECIMAL_LITERAL=31, BY=32, AND=33, ASC=34, ASSIGN=35, + CAST_OP=36, COMMA=37, DESC=38, DOT=39, FALSE=40, FIRST=41, LAST=42, LP=43, + IN=44, IS=45, LIKE=46, NOT=47, NULL=48, NULLS=49, OR=50, PARAM=51, RLIKE=52, + RP=53, TRUE=54, EQ=55, CIEQ=56, NEQ=57, LT=58, LTE=59, GT=60, GTE=61, + PLUS=62, MINUS=63, ASTERISK=64, SLASH=65, PERCENT=66, OPENING_BRACKET=67, + CLOSING_BRACKET=68, UNQUOTED_IDENTIFIER=69, QUOTED_IDENTIFIER=70, EXPR_LINE_COMMENT=71, + EXPR_MULTILINE_COMMENT=72, EXPR_WS=73, OPTIONS=74, METADATA=75, FROM_LINE_COMMENT=76, + FROM_MULTILINE_COMMENT=77, FROM_WS=78, ID_PATTERN=79, PROJECT_LINE_COMMENT=80, + PROJECT_MULTILINE_COMMENT=81, PROJECT_WS=82, AS=83, RENAME_LINE_COMMENT=84, + RENAME_MULTILINE_COMMENT=85, RENAME_WS=86, ON=87, WITH=88, ENRICH_POLICY_NAME=89, + ENRICH_LINE_COMMENT=90, ENRICH_MULTILINE_COMMENT=91, ENRICH_WS=92, ENRICH_FIELD_LINE_COMMENT=93, + ENRICH_FIELD_MULTILINE_COMMENT=94, ENRICH_FIELD_WS=95, MVEXPAND_LINE_COMMENT=96, + MVEXPAND_MULTILINE_COMMENT=97, MVEXPAND_WS=98, INFO=99, SHOW_LINE_COMMENT=100, + SHOW_MULTILINE_COMMENT=101, SHOW_WS=102, FUNCTIONS=103, META_LINE_COMMENT=104, + META_MULTILINE_COMMENT=105, META_WS=106, COLON=107, SETTING=108, SETTING_LINE_COMMENT=109, + SETTTING_MULTILINE_COMMENT=110, SETTING_WS=111, METRICS_LINE_COMMENT=112, + METRICS_MULTILINE_COMMENT=113, METRICS_WS=114, CLOSING_METRICS_LINE_COMMENT=115, + CLOSING_METRICS_MULTILINE_COMMENT=116, CLOSING_METRICS_WS=117; public static final int EXPLAIN_MODE=1, EXPRESSION_MODE=2, FROM_MODE=3, PROJECT_MODE=4, RENAME_MODE=5, ENRICH_MODE=6, ENRICH_FIELD_MODE=7, MVEXPAND_MODE=8, SHOW_MODE=9, META_MODE=10, - SETTING_MODE=11; + SETTING_MODE=11, METRICS_MODE=12, CLOSING_METRICS_MODE=13; public static String[] channelNames = { "DEFAULT_TOKEN_CHANNEL", "HIDDEN" }; @@ -49,15 +51,16 @@ public class EsqlBaseLexer extends Lexer { public static String[] modeNames = { "DEFAULT_MODE", "EXPLAIN_MODE", 
"EXPRESSION_MODE", "FROM_MODE", "PROJECT_MODE", "RENAME_MODE", "ENRICH_MODE", "ENRICH_FIELD_MODE", "MVEXPAND_MODE", "SHOW_MODE", - "META_MODE", "SETTING_MODE" + "META_MODE", "SETTING_MODE", "METRICS_MODE", "CLOSING_METRICS_MODE" }; private static String[] makeRuleNames() { return new String[] { "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", "INLINESTATS", - "KEEP", "LIMIT", "META", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", - "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", - "WS", "EXPLAIN_OPENING_BRACKET", "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", + "KEEP", "LIMIT", "META", "METRICS", "MV_EXPAND", "RENAME", "ROW", "SHOW", + "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", + "WS", "INDEX_UNQUOTED_IDENTIFIER_PART", "INDEX_UNQUOTED_IDENTIFIER", + "EXPLAIN_OPENING_BRACKET", "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "PIPE", "DIGIT", "LETTER", "ESCAPE_SEQUENCE", "UNESCAPED_CHARS", "EXPONENT", "ASPERAND", "BACKQUOTE", "BACKQUOTE_BLOCK", "UNDERSCORE", "UNQUOTED_ID_BODY", "QUOTED_STRING", "INTEGER_LITERAL", @@ -69,23 +72,27 @@ private static String[] makeRuleNames() { "QUOTED_ID", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", "FROM_COMMA", "FROM_ASSIGN", "FROM_QUOTED_STRING", "OPTIONS", "METADATA", - "FROM_UNQUOTED_IDENTIFIER_PART", "FROM_UNQUOTED_IDENTIFIER", "FROM_QUOTED_IDENTIFIER", - "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", "FROM_WS", "PROJECT_PIPE", - "PROJECT_DOT", "PROJECT_COMMA", "UNQUOTED_ID_BODY_WITH_PATTERN", "UNQUOTED_ID_PATTERN", - "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", - "RENAME_PIPE", "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT", "AS", "RENAME_ID_PATTERN", - "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ENRICH_PIPE", - "ENRICH_OPENING_BRACKET", "ON", "WITH", 
"ENRICH_POLICY_NAME_BODY", "ENRICH_POLICY_NAME", - "ENRICH_QUOTED_IDENTIFIER", "ENRICH_MODE_UNQUOTED_VALUE", "ENRICH_LINE_COMMENT", - "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_PIPE", "ENRICH_FIELD_ASSIGN", - "ENRICH_FIELD_COMMA", "ENRICH_FIELD_DOT", "ENRICH_FIELD_WITH", "ENRICH_FIELD_ID_PATTERN", - "ENRICH_FIELD_QUOTED_IDENTIFIER", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", - "ENRICH_FIELD_WS", "MVEXPAND_PIPE", "MVEXPAND_DOT", "MVEXPAND_QUOTED_IDENTIFIER", - "MVEXPAND_UNQUOTED_IDENTIFIER", "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", - "MVEXPAND_WS", "SHOW_PIPE", "INFO", "SHOW_LINE_COMMENT", "SHOW_MULTILINE_COMMENT", - "SHOW_WS", "META_PIPE", "FUNCTIONS", "META_LINE_COMMENT", "META_MULTILINE_COMMENT", + "FROM_INDEX_UNQUOTED_IDENTIFIER", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", + "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", "UNQUOTED_ID_BODY_WITH_PATTERN", + "UNQUOTED_ID_PATTERN", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", + "PROJECT_WS", "RENAME_PIPE", "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT", + "AS", "RENAME_ID_PATTERN", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", + "RENAME_WS", "ENRICH_PIPE", "ENRICH_OPENING_BRACKET", "ON", "WITH", "ENRICH_POLICY_NAME_BODY", + "ENRICH_POLICY_NAME", "ENRICH_QUOTED_IDENTIFIER", "ENRICH_MODE_UNQUOTED_VALUE", + "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_PIPE", + "ENRICH_FIELD_ASSIGN", "ENRICH_FIELD_COMMA", "ENRICH_FIELD_DOT", "ENRICH_FIELD_WITH", + "ENRICH_FIELD_ID_PATTERN", "ENRICH_FIELD_QUOTED_IDENTIFIER", "ENRICH_FIELD_LINE_COMMENT", + "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_PIPE", + "MVEXPAND_DOT", "MVEXPAND_QUOTED_IDENTIFIER", "MVEXPAND_UNQUOTED_IDENTIFIER", + "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", + "SHOW_PIPE", "INFO", "SHOW_LINE_COMMENT", "SHOW_MULTILINE_COMMENT", "SHOW_WS", + "META_PIPE", "FUNCTIONS", "META_LINE_COMMENT", 
"META_MULTILINE_COMMENT", "META_WS", "SETTING_CLOSING_BRACKET", "COLON", "SETTING", "SETTING_LINE_COMMENT", - "SETTTING_MULTILINE_COMMENT", "SETTING_WS" + "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "METRICS_PIPE", "METRICS_INDEX_UNQUOTED_IDENTIFIER", + "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_COMMA", + "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", + "CLOSING_METRICS_WS", "CLOSING_METRICS_QUOTED_IDENTIFIER", "CLOSING_METRICS_UNQUOTED_IDENTIFIER", + "CLOSING_METRICS_BY", "CLOSING_METRICS_PIPE" }; } public static final String[] ruleNames = makeRuleNames(); @@ -93,15 +100,15 @@ private static String[] makeRuleNames() { private static String[] makeLiteralNames() { return new String[] { null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", - "'grok'", "'inlinestats'", "'keep'", "'limit'", "'meta'", "'mv_expand'", - "'rename'", "'row'", "'show'", "'sort'", "'stats'", "'where'", null, - null, null, null, null, null, null, "'|'", null, null, null, "'by'", - "'and'", "'asc'", "'='", "'::'", "','", "'desc'", "'.'", "'false'", "'first'", - "'last'", "'('", "'in'", "'is'", "'like'", "'not'", "'null'", "'nulls'", - "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", - "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, "']'", - null, null, null, null, null, "'options'", "'metadata'", null, null, - null, null, null, null, null, null, "'as'", null, null, null, "'on'", + "'grok'", "'inlinestats'", "'keep'", "'limit'", "'meta'", "'metrics'", + "'mv_expand'", "'rename'", "'row'", "'show'", "'sort'", "'stats'", "'where'", + null, null, null, null, null, null, null, null, "'|'", null, null, null, + "'by'", "'and'", "'asc'", "'='", "'::'", "','", "'desc'", "'.'", "'false'", + "'first'", "'last'", "'('", "'in'", "'is'", "'like'", "'not'", "'null'", + "'nulls'", "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", + "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", 
"'-'", "'*'", "'/'", "'%'", + null, "']'", null, null, null, null, null, "'options'", "'metadata'", + null, null, null, null, null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, null, null, null, null, null, null, null, null, "'info'", null, null, null, "'functions'", null, null, null, "':'" }; @@ -110,25 +117,28 @@ private static String[] makeLiteralNames() { private static String[] makeSymbolicNames() { return new String[] { null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", - "INLINESTATS", "KEEP", "LIMIT", "META", "MV_EXPAND", "RENAME", "ROW", - "SHOW", "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", - "WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", - "PIPE", "QUOTED_STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", - "AND", "ASC", "ASSIGN", "CAST_OP", "COMMA", "DESC", "DOT", "FALSE", "FIRST", - "LAST", "LP", "IN", "IS", "LIKE", "NOT", "NULL", "NULLS", "OR", "PARAM", - "RLIKE", "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", - "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", - "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", - "EXPR_WS", "OPTIONS", "METADATA", "FROM_UNQUOTED_IDENTIFIER", "FROM_LINE_COMMENT", - "FROM_MULTILINE_COMMENT", "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", - "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", - "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", - "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", + "INLINESTATS", "KEEP", "LIMIT", "META", "METRICS", "MV_EXPAND", "RENAME", + "ROW", "SHOW", "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", + "MULTILINE_COMMENT", "WS", "INDEX_UNQUOTED_IDENTIFIER", "EXPLAIN_WS", + "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "PIPE", "QUOTED_STRING", + "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", 
"AND", "ASC", "ASSIGN", "CAST_OP", + "COMMA", "DESC", "DOT", "FALSE", "FIRST", "LAST", "LP", "IN", "IS", "LIKE", + "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", + "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", + "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", + "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", + "OPTIONS", "METADATA", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", + "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", + "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", + "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", + "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", "SHOW_MULTILINE_COMMENT", "SHOW_WS", "FUNCTIONS", "META_LINE_COMMENT", "META_MULTILINE_COMMENT", "META_WS", "COLON", "SETTING", "SETTING_LINE_COMMENT", - "SETTTING_MULTILINE_COMMENT", "SETTING_WS" + "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", + "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", + "CLOSING_METRICS_WS" }; } private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames(); @@ -191,185 +201,204 @@ public EsqlBaseLexer(CharStream input) { public ATN getATN() { return _ATN; } public static final String _serializedATN = - "\u0004\u0000n\u04b3\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ + "\u0004\u0000u\u051b\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ - "\u0006\uffff\uffff\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ - 
"\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ - "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ - "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ - "\f\u0007\f\u0002\r\u0007\r\u0002\u000e\u0007\u000e\u0002\u000f\u0007\u000f"+ - "\u0002\u0010\u0007\u0010\u0002\u0011\u0007\u0011\u0002\u0012\u0007\u0012"+ - "\u0002\u0013\u0007\u0013\u0002\u0014\u0007\u0014\u0002\u0015\u0007\u0015"+ - "\u0002\u0016\u0007\u0016\u0002\u0017\u0007\u0017\u0002\u0018\u0007\u0018"+ - "\u0002\u0019\u0007\u0019\u0002\u001a\u0007\u001a\u0002\u001b\u0007\u001b"+ - "\u0002\u001c\u0007\u001c\u0002\u001d\u0007\u001d\u0002\u001e\u0007\u001e"+ - "\u0002\u001f\u0007\u001f\u0002 \u0007 \u0002!\u0007!\u0002\"\u0007\"\u0002"+ - "#\u0007#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002"+ - "(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002"+ - "-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u0002"+ - "2\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u00076\u0002"+ - "7\u00077\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007;\u0002"+ - "<\u0007<\u0002=\u0007=\u0002>\u0007>\u0002?\u0007?\u0002@\u0007@\u0002"+ - "A\u0007A\u0002B\u0007B\u0002C\u0007C\u0002D\u0007D\u0002E\u0007E\u0002"+ - "F\u0007F\u0002G\u0007G\u0002H\u0007H\u0002I\u0007I\u0002J\u0007J\u0002"+ - "K\u0007K\u0002L\u0007L\u0002M\u0007M\u0002N\u0007N\u0002O\u0007O\u0002"+ - "P\u0007P\u0002Q\u0007Q\u0002R\u0007R\u0002S\u0007S\u0002T\u0007T\u0002"+ - "U\u0007U\u0002V\u0007V\u0002W\u0007W\u0002X\u0007X\u0002Y\u0007Y\u0002"+ - "Z\u0007Z\u0002[\u0007[\u0002\\\u0007\\\u0002]\u0007]\u0002^\u0007^\u0002"+ - "_\u0007_\u0002`\u0007`\u0002a\u0007a\u0002b\u0007b\u0002c\u0007c\u0002"+ - "d\u0007d\u0002e\u0007e\u0002f\u0007f\u0002g\u0007g\u0002h\u0007h\u0002"+ - "i\u0007i\u0002j\u0007j\u0002k\u0007k\u0002l\u0007l\u0002m\u0007m\u0002"+ - 
"n\u0007n\u0002o\u0007o\u0002p\u0007p\u0002q\u0007q\u0002r\u0007r\u0002"+ - "s\u0007s\u0002t\u0007t\u0002u\u0007u\u0002v\u0007v\u0002w\u0007w\u0002"+ - "x\u0007x\u0002y\u0007y\u0002z\u0007z\u0002{\u0007{\u0002|\u0007|\u0002"+ - "}\u0007}\u0002~\u0007~\u0002\u007f\u0007\u007f\u0002\u0080\u0007\u0080"+ - "\u0002\u0081\u0007\u0081\u0002\u0082\u0007\u0082\u0002\u0083\u0007\u0083"+ - "\u0002\u0084\u0007\u0084\u0002\u0085\u0007\u0085\u0002\u0086\u0007\u0086"+ - "\u0002\u0087\u0007\u0087\u0002\u0088\u0007\u0088\u0002\u0089\u0007\u0089"+ - "\u0002\u008a\u0007\u008a\u0002\u008b\u0007\u008b\u0002\u008c\u0007\u008c"+ - "\u0002\u008d\u0007\u008d\u0002\u008e\u0007\u008e\u0002\u008f\u0007\u008f"+ - "\u0002\u0090\u0007\u0090\u0002\u0091\u0007\u0091\u0002\u0092\u0007\u0092"+ - "\u0002\u0093\u0007\u0093\u0002\u0094\u0007\u0094\u0002\u0095\u0007\u0095"+ - "\u0002\u0096\u0007\u0096\u0002\u0097\u0007\u0097\u0002\u0098\u0007\u0098"+ - "\u0002\u0099\u0007\u0099\u0002\u009a\u0007\u009a\u0002\u009b\u0007\u009b"+ - "\u0002\u009c\u0007\u009c\u0002\u009d\u0007\u009d\u0002\u009e\u0007\u009e"+ - "\u0002\u009f\u0007\u009f\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000"+ - "\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002"+ - "\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0003\u0001\u0003"+ - "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004"+ - "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004"+ - "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006"+ - "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007"+ - "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+ - 
"\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+ - "\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001"+ - "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001"+ - "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001"+ + "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0002\u0000\u0007"+ + "\u0000\u0002\u0001\u0007\u0001\u0002\u0002\u0007\u0002\u0002\u0003\u0007"+ + "\u0003\u0002\u0004\u0007\u0004\u0002\u0005\u0007\u0005\u0002\u0006\u0007"+ + "\u0006\u0002\u0007\u0007\u0007\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n"+ + "\u0007\n\u0002\u000b\u0007\u000b\u0002\f\u0007\f\u0002\r\u0007\r\u0002"+ + "\u000e\u0007\u000e\u0002\u000f\u0007\u000f\u0002\u0010\u0007\u0010\u0002"+ + "\u0011\u0007\u0011\u0002\u0012\u0007\u0012\u0002\u0013\u0007\u0013\u0002"+ + "\u0014\u0007\u0014\u0002\u0015\u0007\u0015\u0002\u0016\u0007\u0016\u0002"+ + "\u0017\u0007\u0017\u0002\u0018\u0007\u0018\u0002\u0019\u0007\u0019\u0002"+ + "\u001a\u0007\u001a\u0002\u001b\u0007\u001b\u0002\u001c\u0007\u001c\u0002"+ + "\u001d\u0007\u001d\u0002\u001e\u0007\u001e\u0002\u001f\u0007\u001f\u0002"+ + " \u0007 \u0002!\u0007!\u0002\"\u0007\"\u0002#\u0007#\u0002$\u0007$\u0002"+ + "%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002(\u0007(\u0002)\u0007)\u0002"+ + "*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002-\u0007-\u0002.\u0007.\u0002"+ + "/\u0007/\u00020\u00070\u00021\u00071\u00022\u00072\u00023\u00073\u0002"+ + "4\u00074\u00025\u00075\u00026\u00076\u00027\u00077\u00028\u00078\u0002"+ + "9\u00079\u0002:\u0007:\u0002;\u0007;\u0002<\u0007<\u0002=\u0007=\u0002"+ + ">\u0007>\u0002?\u0007?\u0002@\u0007@\u0002A\u0007A\u0002B\u0007B\u0002"+ + "C\u0007C\u0002D\u0007D\u0002E\u0007E\u0002F\u0007F\u0002G\u0007G\u0002"+ + "H\u0007H\u0002I\u0007I\u0002J\u0007J\u0002K\u0007K\u0002L\u0007L\u0002"+ + "M\u0007M\u0002N\u0007N\u0002O\u0007O\u0002P\u0007P\u0002Q\u0007Q\u0002"+ + 
"R\u0007R\u0002S\u0007S\u0002T\u0007T\u0002U\u0007U\u0002V\u0007V\u0002"+ + "W\u0007W\u0002X\u0007X\u0002Y\u0007Y\u0002Z\u0007Z\u0002[\u0007[\u0002"+ + "\\\u0007\\\u0002]\u0007]\u0002^\u0007^\u0002_\u0007_\u0002`\u0007`\u0002"+ + "a\u0007a\u0002b\u0007b\u0002c\u0007c\u0002d\u0007d\u0002e\u0007e\u0002"+ + "f\u0007f\u0002g\u0007g\u0002h\u0007h\u0002i\u0007i\u0002j\u0007j\u0002"+ + "k\u0007k\u0002l\u0007l\u0002m\u0007m\u0002n\u0007n\u0002o\u0007o\u0002"+ + "p\u0007p\u0002q\u0007q\u0002r\u0007r\u0002s\u0007s\u0002t\u0007t\u0002"+ + "u\u0007u\u0002v\u0007v\u0002w\u0007w\u0002x\u0007x\u0002y\u0007y\u0002"+ + "z\u0007z\u0002{\u0007{\u0002|\u0007|\u0002}\u0007}\u0002~\u0007~\u0002"+ + "\u007f\u0007\u007f\u0002\u0080\u0007\u0080\u0002\u0081\u0007\u0081\u0002"+ + "\u0082\u0007\u0082\u0002\u0083\u0007\u0083\u0002\u0084\u0007\u0084\u0002"+ + "\u0085\u0007\u0085\u0002\u0086\u0007\u0086\u0002\u0087\u0007\u0087\u0002"+ + "\u0088\u0007\u0088\u0002\u0089\u0007\u0089\u0002\u008a\u0007\u008a\u0002"+ + "\u008b\u0007\u008b\u0002\u008c\u0007\u008c\u0002\u008d\u0007\u008d\u0002"+ + "\u008e\u0007\u008e\u0002\u008f\u0007\u008f\u0002\u0090\u0007\u0090\u0002"+ + "\u0091\u0007\u0091\u0002\u0092\u0007\u0092\u0002\u0093\u0007\u0093\u0002"+ + "\u0094\u0007\u0094\u0002\u0095\u0007\u0095\u0002\u0096\u0007\u0096\u0002"+ + "\u0097\u0007\u0097\u0002\u0098\u0007\u0098\u0002\u0099\u0007\u0099\u0002"+ + "\u009a\u0007\u009a\u0002\u009b\u0007\u009b\u0002\u009c\u0007\u009c\u0002"+ + "\u009d\u0007\u009d\u0002\u009e\u0007\u009e\u0002\u009f\u0007\u009f\u0002"+ + "\u00a0\u0007\u00a0\u0002\u00a1\u0007\u00a1\u0002\u00a2\u0007\u00a2\u0002"+ + "\u00a3\u0007\u00a3\u0002\u00a4\u0007\u00a4\u0002\u00a5\u0007\u00a5\u0002"+ + "\u00a6\u0007\u00a6\u0002\u00a7\u0007\u00a7\u0002\u00a8\u0007\u00a8\u0002"+ + "\u00a9\u0007\u00a9\u0002\u00aa\u0007\u00aa\u0002\u00ab\u0007\u00ab\u0002"+ + "\u00ac\u0007\u00ac\u0002\u00ad\u0007\u00ad\u0001\u0000\u0001\u0000\u0001"+ + 
"\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ + "\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ + "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ + "\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ + "\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ + "\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ + "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ + "\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b"+ + "\u0001\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ + "\t\u0001\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ "\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001"+ "\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001"+ - "\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001"+ - "\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ - "\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001"+ - "\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001"+ - "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ - "\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ - "\u0011\u0001\u0011\u0001\u0012\u0004\u0012\u01e4\b\u0012\u000b\u0012\f"+ - "\u0012\u01e5\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0013"+ - "\u0001\u0013\u0005\u0013\u01ee\b\u0013\n\u0013\f\u0013\u01f1\t\u0013\u0001"+ - 
"\u0013\u0003\u0013\u01f4\b\u0013\u0001\u0013\u0003\u0013\u01f7\b\u0013"+ - "\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014"+ - "\u0001\u0014\u0005\u0014\u0200\b\u0014\n\u0014\f\u0014\u0203\t\u0014\u0001"+ - "\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0015\u0004"+ - "\u0015\u020b\b\u0015\u000b\u0015\f\u0015\u020c\u0001\u0015\u0001\u0015"+ - "\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0017"+ - "\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018"+ - "\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019"+ - "\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b"+ - "\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001d\u0001\u001d"+ - "\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0001 \u0001"+ - " \u0003 \u0236\b \u0001 \u0004 \u0239\b \u000b \f \u023a\u0001!\u0001"+ - "!\u0001\"\u0001\"\u0001#\u0001#\u0001#\u0003#\u0244\b#\u0001$\u0001$\u0001"+ - "%\u0001%\u0001%\u0003%\u024b\b%\u0001&\u0001&\u0001&\u0005&\u0250\b&\n"+ - "&\f&\u0253\t&\u0001&\u0001&\u0001&\u0001&\u0001&\u0001&\u0005&\u025b\b"+ - "&\n&\f&\u025e\t&\u0001&\u0001&\u0001&\u0001&\u0001&\u0003&\u0265\b&\u0001"+ - "&\u0003&\u0268\b&\u0003&\u026a\b&\u0001\'\u0004\'\u026d\b\'\u000b\'\f"+ - "\'\u026e\u0001(\u0004(\u0272\b(\u000b(\f(\u0273\u0001(\u0001(\u0005(\u0278"+ - "\b(\n(\f(\u027b\t(\u0001(\u0001(\u0004(\u027f\b(\u000b(\f(\u0280\u0001"+ - "(\u0004(\u0284\b(\u000b(\f(\u0285\u0001(\u0001(\u0005(\u028a\b(\n(\f("+ - "\u028d\t(\u0003(\u028f\b(\u0001(\u0001(\u0001(\u0001(\u0004(\u0295\b("+ - "\u000b(\f(\u0296\u0001(\u0001(\u0003(\u029b\b(\u0001)\u0001)\u0001)\u0001"+ - "*\u0001*\u0001*\u0001*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001"+ - "-\u0001-\u0001-\u0001.\u0001.\u0001/\u0001/\u0001/\u0001/\u0001/\u0001"+ - "0\u00010\u00011\u00011\u00011\u00011\u00011\u00011\u00012\u00012\u0001"+ - 
"2\u00012\u00012\u00012\u00013\u00013\u00013\u00013\u00013\u00014\u0001"+ - "4\u00015\u00015\u00015\u00016\u00016\u00016\u00017\u00017\u00017\u0001"+ - "7\u00017\u00018\u00018\u00018\u00018\u00019\u00019\u00019\u00019\u0001"+ - "9\u0001:\u0001:\u0001:\u0001:\u0001:\u0001:\u0001;\u0001;\u0001;\u0001"+ - "<\u0001<\u0001=\u0001=\u0001=\u0001=\u0001=\u0001=\u0001>\u0001>\u0001"+ - "?\u0001?\u0001?\u0001?\u0001?\u0001@\u0001@\u0001@\u0001A\u0001A\u0001"+ - "A\u0001B\u0001B\u0001B\u0001C\u0001C\u0001D\u0001D\u0001D\u0001E\u0001"+ - "E\u0001F\u0001F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001I\u0001I\u0001"+ - "J\u0001J\u0001K\u0001K\u0001L\u0001L\u0001L\u0001L\u0001L\u0001M\u0001"+ - "M\u0001M\u0001M\u0001M\u0001N\u0001N\u0005N\u031e\bN\nN\fN\u0321\tN\u0001"+ - "N\u0001N\u0003N\u0325\bN\u0001N\u0004N\u0328\bN\u000bN\fN\u0329\u0003"+ - "N\u032c\bN\u0001O\u0001O\u0004O\u0330\bO\u000bO\fO\u0331\u0001O\u0001"+ - "O\u0001P\u0001P\u0001Q\u0001Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001"+ - "R\u0001S\u0001S\u0001S\u0001S\u0001T\u0001T\u0001T\u0001T\u0001T\u0001"+ - "U\u0001U\u0001U\u0001U\u0001V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001"+ - "W\u0001W\u0001X\u0001X\u0001X\u0001X\u0001Y\u0001Y\u0001Y\u0001Y\u0001"+ - "Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001"+ - "[\u0001[\u0001[\u0001[\u0001[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0003"+ - "\\\u0371\b\\\u0001]\u0004]\u0374\b]\u000b]\f]\u0375\u0001^\u0001^\u0001"+ - "^\u0001^\u0001_\u0001_\u0001_\u0001_\u0001`\u0001`\u0001`\u0001`\u0001"+ - "a\u0001a\u0001a\u0001a\u0001b\u0001b\u0001b\u0001b\u0001b\u0001c\u0001"+ - "c\u0001c\u0001c\u0001d\u0001d\u0001d\u0001d\u0001e\u0001e\u0001e\u0001"+ - "e\u0003e\u0399\be\u0001f\u0001f\u0003f\u039d\bf\u0001f\u0005f\u03a0\b"+ - "f\nf\ff\u03a3\tf\u0001f\u0001f\u0003f\u03a7\bf\u0001f\u0004f\u03aa\bf"+ - "\u000bf\ff\u03ab\u0003f\u03ae\bf\u0001g\u0001g\u0004g\u03b2\bg\u000bg"+ - "\fg\u03b3\u0001h\u0001h\u0001h\u0001h\u0001i\u0001i\u0001i\u0001i\u0001"+ - 
"j\u0001j\u0001j\u0001j\u0001k\u0001k\u0001k\u0001k\u0001k\u0001l\u0001"+ - "l\u0001l\u0001l\u0001m\u0001m\u0001m\u0001m\u0001n\u0001n\u0001n\u0001"+ - "n\u0001o\u0001o\u0001o\u0001p\u0001p\u0001p\u0001p\u0001q\u0001q\u0001"+ - "q\u0001q\u0001r\u0001r\u0001r\u0001r\u0001s\u0001s\u0001s\u0001s\u0001"+ - "t\u0001t\u0001t\u0001t\u0001t\u0001u\u0001u\u0001u\u0001u\u0001u\u0001"+ + "\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001"+ + "\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001"+ + "\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ + "\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001"+ + "\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ + "\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ + "\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0012\u0001\u0012\u0001"+ + "\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001"+ + "\u0013\u0004\u0013\u020c\b\u0013\u000b\u0013\f\u0013\u020d\u0001\u0013"+ + "\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0005\u0014"+ + "\u0216\b\u0014\n\u0014\f\u0014\u0219\t\u0014\u0001\u0014\u0003\u0014\u021c"+ + "\b\u0014\u0001\u0014\u0003\u0014\u021f\b\u0014\u0001\u0014\u0001\u0014"+ + "\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0005\u0015"+ + "\u0228\b\u0015\n\u0015\f\u0015\u022b\t\u0015\u0001\u0015\u0001\u0015\u0001"+ + "\u0015\u0001\u0015\u0001\u0015\u0001\u0016\u0004\u0016\u0233\b\u0016\u000b"+ + "\u0016\f\u0016\u0234\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017\u0001"+ + "\u0017\u0003\u0017\u023c\b\u0017\u0001\u0018\u0004\u0018\u023f\b\u0018"+ + "\u000b\u0018\f\u0018\u0240\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019"+ + "\u0001\u0019\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a"+ + "\u0001\u001b\u0001\u001b\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c"+ + 
"\u0001\u001c\u0001\u001c\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d"+ + "\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f"+ + "\u0001 \u0001 \u0001!\u0001!\u0001!\u0001\"\u0001\"\u0001#\u0001#\u0003"+ + "#\u0268\b#\u0001#\u0004#\u026b\b#\u000b#\f#\u026c\u0001$\u0001$\u0001"+ + "%\u0001%\u0001&\u0001&\u0001&\u0003&\u0276\b&\u0001\'\u0001\'\u0001(\u0001"+ + "(\u0001(\u0003(\u027d\b(\u0001)\u0001)\u0001)\u0005)\u0282\b)\n)\f)\u0285"+ + "\t)\u0001)\u0001)\u0001)\u0001)\u0001)\u0001)\u0005)\u028d\b)\n)\f)\u0290"+ + "\t)\u0001)\u0001)\u0001)\u0001)\u0001)\u0003)\u0297\b)\u0001)\u0003)\u029a"+ + "\b)\u0003)\u029c\b)\u0001*\u0004*\u029f\b*\u000b*\f*\u02a0\u0001+\u0004"+ + "+\u02a4\b+\u000b+\f+\u02a5\u0001+\u0001+\u0005+\u02aa\b+\n+\f+\u02ad\t"+ + "+\u0001+\u0001+\u0004+\u02b1\b+\u000b+\f+\u02b2\u0001+\u0004+\u02b6\b"+ + "+\u000b+\f+\u02b7\u0001+\u0001+\u0005+\u02bc\b+\n+\f+\u02bf\t+\u0003+"+ + "\u02c1\b+\u0001+\u0001+\u0001+\u0001+\u0004+\u02c7\b+\u000b+\f+\u02c8"+ + "\u0001+\u0001+\u0003+\u02cd\b+\u0001,\u0001,\u0001,\u0001-\u0001-\u0001"+ + "-\u0001-\u0001.\u0001.\u0001.\u0001.\u0001/\u0001/\u00010\u00010\u0001"+ + "0\u00011\u00011\u00012\u00012\u00012\u00012\u00012\u00013\u00013\u0001"+ + "4\u00014\u00014\u00014\u00014\u00014\u00015\u00015\u00015\u00015\u0001"+ + "5\u00015\u00016\u00016\u00016\u00016\u00016\u00017\u00017\u00018\u0001"+ + "8\u00018\u00019\u00019\u00019\u0001:\u0001:\u0001:\u0001:\u0001:\u0001"+ + ";\u0001;\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001<\u0001=\u0001"+ + "=\u0001=\u0001=\u0001=\u0001=\u0001>\u0001>\u0001>\u0001?\u0001?\u0001"+ + "@\u0001@\u0001@\u0001@\u0001@\u0001@\u0001A\u0001A\u0001B\u0001B\u0001"+ + "B\u0001B\u0001B\u0001C\u0001C\u0001C\u0001D\u0001D\u0001D\u0001E\u0001"+ + "E\u0001E\u0001F\u0001F\u0001G\u0001G\u0001G\u0001H\u0001H\u0001I\u0001"+ + "I\u0001I\u0001J\u0001J\u0001K\u0001K\u0001L\u0001L\u0001M\u0001M\u0001"+ + "N\u0001N\u0001O\u0001O\u0001O\u0001O\u0001O\u0001P\u0001P\u0001P\u0001"+ + 
"P\u0001P\u0001Q\u0001Q\u0005Q\u0350\bQ\nQ\fQ\u0353\tQ\u0001Q\u0001Q\u0003"+ + "Q\u0357\bQ\u0001Q\u0004Q\u035a\bQ\u000bQ\fQ\u035b\u0003Q\u035e\bQ\u0001"+ + "R\u0001R\u0004R\u0362\bR\u000bR\fR\u0363\u0001R\u0001R\u0001S\u0001S\u0001"+ + "T\u0001T\u0001T\u0001T\u0001U\u0001U\u0001U\u0001U\u0001V\u0001V\u0001"+ + "V\u0001V\u0001W\u0001W\u0001W\u0001W\u0001W\u0001X\u0001X\u0001X\u0001"+ + "X\u0001Y\u0001Y\u0001Y\u0001Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001"+ + "[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0001\\\u0001]\u0001]\u0001]\u0001"+ + "]\u0001]\u0001]\u0001]\u0001]\u0001^\u0001^\u0001^\u0001^\u0001^\u0001"+ + "^\u0001^\u0001^\u0001^\u0001_\u0001_\u0001_\u0001_\u0001`\u0001`\u0001"+ + "`\u0001`\u0001a\u0001a\u0001a\u0001a\u0001b\u0001b\u0001b\u0001b\u0001"+ + "c\u0001c\u0001c\u0001c\u0001c\u0001d\u0001d\u0001d\u0001d\u0001e\u0001"+ + "e\u0001e\u0001e\u0001f\u0001f\u0001f\u0001f\u0003f\u03c1\bf\u0001g\u0001"+ + "g\u0003g\u03c5\bg\u0001g\u0005g\u03c8\bg\ng\fg\u03cb\tg\u0001g\u0001g"+ + "\u0003g\u03cf\bg\u0001g\u0004g\u03d2\bg\u000bg\fg\u03d3\u0003g\u03d6\b"+ + "g\u0001h\u0001h\u0004h\u03da\bh\u000bh\fh\u03db\u0001i\u0001i\u0001i\u0001"+ + "i\u0001j\u0001j\u0001j\u0001j\u0001k\u0001k\u0001k\u0001k\u0001l\u0001"+ + "l\u0001l\u0001l\u0001l\u0001m\u0001m\u0001m\u0001m\u0001n\u0001n\u0001"+ + "n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001p\u0001p\u0001p\u0001q\u0001"+ + "q\u0001q\u0001q\u0001r\u0001r\u0001r\u0001r\u0001s\u0001s\u0001s\u0001"+ + "s\u0001t\u0001t\u0001t\u0001t\u0001u\u0001u\u0001u\u0001u\u0001u\u0001"+ "v\u0001v\u0001v\u0001v\u0001v\u0001w\u0001w\u0001w\u0001w\u0001w\u0001"+ - "w\u0001w\u0001x\u0001x\u0001y\u0004y\u03ff\by\u000by\fy\u0400\u0001y\u0001"+ - "y\u0003y\u0405\by\u0001y\u0004y\u0408\by\u000by\fy\u0409\u0001z\u0001"+ - "z\u0001z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001|\u0001"+ - "|\u0001}\u0001}\u0001}\u0001}\u0001~\u0001~\u0001~\u0001~\u0001\u007f"+ - "\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u0080"+ - 
"\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0081\u0001\u0081\u0001\u0081"+ - "\u0001\u0081\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0083"+ - "\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001\u0084"+ - "\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0086"+ - "\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0087\u0001\u0087\u0001\u0087"+ - "\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0089"+ - "\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u008a\u0001\u008a"+ - "\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b"+ - "\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001\u008d"+ - "\u0001\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e"+ - "\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u0090\u0001\u0090"+ - "\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0091"+ - "\u0001\u0091\u0001\u0091\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092"+ - "\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0094\u0001\u0094"+ - "\u0001\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095"+ - "\u0001\u0095\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096"+ - "\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0097"+ - "\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0098\u0001\u0098\u0001\u0098"+ - "\u0001\u0098\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u009a"+ - "\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009b\u0001\u009b"+ - "\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0004\u009c"+ - "\u04a4\b\u009c\u000b\u009c\f\u009c\u04a5\u0001\u009d\u0001\u009d\u0001"+ - "\u009d\u0001\u009d\u0001\u009e\u0001\u009e\u0001\u009e\u0001\u009e\u0001"+ - "\u009f\u0001\u009f\u0001\u009f\u0001\u009f\u0002\u0201\u025c\u0000\u00a0"+ - "\f\u0001\u000e\u0002\u0010\u0003\u0012\u0004\u0014\u0005\u0016\u0006\u0018"+ - "\u0007\u001a\b\u001c\t\u001e\n 
\u000b\"\f$\r&\u000e(\u000f*\u0010,\u0011"+ - ".\u00120\u00132\u00144\u00156\u00168\u0000:\u0000<\u0017>\u0018@\u0019"+ - "B\u001aD\u0000F\u0000H\u0000J\u0000L\u0000N\u0000P\u0000R\u0000T\u0000"+ - "V\u0000X\u001bZ\u001c\\\u001d^\u001e`\u001fb d!f\"h#j$l%n&p\'r(t)v*x+"+ - "z,|-~.\u0080/\u00820\u00841\u00862\u00883\u008a4\u008c5\u008e6\u00907"+ - "\u00928\u00949\u0096:\u0098;\u009a<\u009c=\u009e>\u00a0?\u00a2@\u00a4"+ - "A\u00a6B\u00a8C\u00aa\u0000\u00acD\u00aeE\u00b0F\u00b2G\u00b4\u0000\u00b6"+ - "\u0000\u00b8\u0000\u00ba\u0000\u00bc\u0000\u00be\u0000\u00c0H\u00c2I\u00c4"+ - "\u0000\u00c6J\u00c8\u0000\u00caK\u00ccL\u00ceM\u00d0\u0000\u00d2\u0000"+ - "\u00d4\u0000\u00d6\u0000\u00d8\u0000\u00daN\u00dcO\u00deP\u00e0Q\u00e2"+ - "\u0000\u00e4\u0000\u00e6\u0000\u00e8\u0000\u00eaR\u00ec\u0000\u00eeS\u00f0"+ - "T\u00f2U\u00f4\u0000\u00f6\u0000\u00f8V\u00faW\u00fc\u0000\u00feX\u0100"+ - "\u0000\u0102\u0000\u0104Y\u0106Z\u0108[\u010a\u0000\u010c\u0000\u010e"+ - "\u0000\u0110\u0000\u0112\u0000\u0114\u0000\u0116\u0000\u0118\\\u011a]"+ - "\u011c^\u011e\u0000\u0120\u0000\u0122\u0000\u0124\u0000\u0126_\u0128`"+ - "\u012aa\u012c\u0000\u012eb\u0130c\u0132d\u0134e\u0136\u0000\u0138f\u013a"+ - "g\u013ch\u013ei\u0140\u0000\u0142j\u0144k\u0146l\u0148m\u014an\f\u0000"+ - "\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\r\u0006\u0000\t"+ - "\n\r\r //[[]]\u0002\u0000\n\n\r\r\u0003\u0000\t\n\r\r \u0001\u00000"+ - "9\u0002\u0000AZaz\u0005\u0000\"\"\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\"+ - "\\\u0002\u0000EEee\u0002\u0000++--\u0001\u0000``\n\u0000\t\n\r\r ,,/"+ - "/==[[]]``||\u0002\u0000**//\u000b\u0000\t\n\r\r \"#,,//::<<>?\\\\||\u04ce"+ - "\u0000\f\u0001\u0000\u0000\u0000\u0000\u000e\u0001\u0000\u0000\u0000\u0000"+ + "x\u0001x\u0001x\u0001x\u0001x\u0001x\u0001x\u0001y\u0001y\u0001z\u0004"+ + "z\u0427\bz\u000bz\fz\u0428\u0001z\u0001z\u0003z\u042d\bz\u0001z\u0004"+ + "z\u0430\bz\u000bz\fz\u0431\u0001{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001"+ + 
"|\u0001|\u0001}\u0001}\u0001}\u0001}\u0001~\u0001~\u0001~\u0001~\u0001"+ + "\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u0080\u0001\u0080\u0001"+ + "\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0081\u0001\u0081\u0001"+ + "\u0081\u0001\u0081\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0082\u0001"+ + "\u0083\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001"+ + "\u0084\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001"+ + "\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0087\u0001\u0087\u0001"+ + "\u0087\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0088\u0001"+ + "\u0089\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u008a\u0001\u008a\u0001"+ + "\u008a\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b\u0001\u008b\u0001"+ + "\u008b\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001"+ + "\u008d\u0001\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001"+ + "\u008e\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u0090\u0001"+ + "\u0090\u0001\u0090\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0091\u0001"+ + "\u0091\u0001\u0091\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001"+ + "\u0092\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0094\u0001"+ + "\u0094\u0001\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001\u0095\u0001"+ + "\u0095\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001"+ + "\u0097\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0097\u0001"+ + "\u0097\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0098\u0001\u0098\u0001"+ + "\u0098\u0001\u0098\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001"+ + "\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009b\u0001\u009b\u0001"+ + "\u009b\u0001\u009b\u0001\u009b\u0001\u009c\u0001\u009c\u0001\u009d\u0001"+ + "\u009d\u0001\u009d\u0001\u009d\u0001\u009d\u0004\u009d\u04cc\b\u009d\u000b"+ + "\u009d\f\u009d\u04cd\u0001\u009e\u0001\u009e\u0001\u009e\u0001\u009e\u0001"+ + 
"\u009f\u0001\u009f\u0001\u009f\u0001\u009f\u0001\u00a0\u0001\u00a0\u0001"+ + "\u00a0\u0001\u00a0\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001"+ + "\u00a1\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001"+ + "\u00a2\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001\u00a4\u0001"+ + "\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001"+ + "\u00a5\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001"+ + "\u00a6\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a8\u0001"+ + "\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001"+ + "\u00a9\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001"+ + "\u00aa\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001"+ + "\u00ab\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001"+ + "\u00ac\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0002"+ + "\u0229\u028e\u0000\u00ae\u000e\u0001\u0010\u0002\u0012\u0003\u0014\u0004"+ + "\u0016\u0005\u0018\u0006\u001a\u0007\u001c\b\u001e\t \n\"\u000b$\f&\r"+ + "(\u000e*\u000f,\u0010.\u00110\u00122\u00134\u00146\u00158\u0016:\u0017"+ + "<\u0000>\u0018@\u0000B\u0000D\u0019F\u001aH\u001bJ\u001cL\u0000N\u0000"+ + "P\u0000R\u0000T\u0000V\u0000X\u0000Z\u0000\\\u0000^\u0000`\u001db\u001e"+ + "d\u001ff h!j\"l#n$p%r&t\'v(x)z*|+~,\u0080-\u0082.\u0084/\u00860\u0088"+ + "1\u008a2\u008c3\u008e4\u00905\u00926\u00947\u00968\u00989\u009a:\u009c"+ + ";\u009e<\u00a0=\u00a2>\u00a4?\u00a6@\u00a8A\u00aaB\u00acC\u00aeD\u00b0"+ + "E\u00b2\u0000\u00b4F\u00b6G\u00b8H\u00baI\u00bc\u0000\u00be\u0000\u00c0"+ + "\u0000\u00c2\u0000\u00c4\u0000\u00c6\u0000\u00c8J\u00caK\u00cc\u0000\u00ce"+ + "L\u00d0M\u00d2N\u00d4\u0000\u00d6\u0000\u00d8\u0000\u00da\u0000\u00dc"+ + "\u0000\u00deO\u00e0P\u00e2Q\u00e4R\u00e6\u0000\u00e8\u0000\u00ea\u0000"+ + "\u00ec\u0000\u00eeS\u00f0\u0000\u00f2T\u00f4U\u00f6V\u00f8\u0000\u00fa"+ + 
"\u0000\u00fcW\u00feX\u0100\u0000\u0102Y\u0104\u0000\u0106\u0000\u0108"+ + "Z\u010a[\u010c\\\u010e\u0000\u0110\u0000\u0112\u0000\u0114\u0000\u0116"+ + "\u0000\u0118\u0000\u011a\u0000\u011c]\u011e^\u0120_\u0122\u0000\u0124"+ + "\u0000\u0126\u0000\u0128\u0000\u012a`\u012ca\u012eb\u0130\u0000\u0132"+ + "c\u0134d\u0136e\u0138f\u013a\u0000\u013cg\u013eh\u0140i\u0142j\u0144\u0000"+ + "\u0146k\u0148l\u014am\u014cn\u014eo\u0150\u0000\u0152\u0000\u0154p\u0156"+ + "q\u0158r\u015a\u0000\u015cs\u015et\u0160u\u0162\u0000\u0164\u0000\u0166"+ + "\u0000\u0168\u0000\u000e\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007"+ + "\b\t\n\u000b\f\r\r\u0006\u0000\t\n\r\r //[[]]\u0002\u0000\n\n\r\r\u0003"+ + "\u0000\t\n\r\r \n\u0000\t\n\r\r ,,//==[[]]``||\u0002\u0000**//\u0001"+ + "\u000009\u0002\u0000AZaz\u0005\u0000\"\"\\\\nnrrtt\u0004\u0000\n\n\r\r"+ + "\"\"\\\\\u0002\u0000EEee\u0002\u0000++--\u0001\u0000``\u000b\u0000\t\n"+ + "\r\r \"#,,//::<<>?\\\\||\u0534\u0000\u000e\u0001\u0000\u0000\u0000\u0000"+ "\u0010\u0001\u0000\u0000\u0000\u0000\u0012\u0001\u0000\u0000\u0000\u0000"+ "\u0014\u0001\u0000\u0000\u0000\u0000\u0016\u0001\u0000\u0000\u0000\u0000"+ "\u0018\u0001\u0000\u0000\u0000\u0000\u001a\u0001\u0000\u0000\u0000\u0000"+ @@ -379,575 +408,625 @@ public EsqlBaseLexer(CharStream input) { "\u0000\u0000*\u0001\u0000\u0000\u0000\u0000,\u0001\u0000\u0000\u0000\u0000"+ ".\u0001\u0000\u0000\u0000\u00000\u0001\u0000\u0000\u0000\u00002\u0001"+ "\u0000\u0000\u0000\u00004\u0001\u0000\u0000\u0000\u00006\u0001\u0000\u0000"+ - "\u0000\u00018\u0001\u0000\u0000\u0000\u0001:\u0001\u0000\u0000\u0000\u0001"+ - "<\u0001\u0000\u0000\u0000\u0001>\u0001\u0000\u0000\u0000\u0001@\u0001"+ - "\u0000\u0000\u0000\u0002B\u0001\u0000\u0000\u0000\u0002X\u0001\u0000\u0000"+ - "\u0000\u0002Z\u0001\u0000\u0000\u0000\u0002\\\u0001\u0000\u0000\u0000"+ - "\u0002^\u0001\u0000\u0000\u0000\u0002`\u0001\u0000\u0000\u0000\u0002b"+ - "\u0001\u0000\u0000\u0000\u0002d\u0001\u0000\u0000\u0000\u0002f\u0001\u0000"+ - 
"\u0000\u0000\u0002h\u0001\u0000\u0000\u0000\u0002j\u0001\u0000\u0000\u0000"+ - "\u0002l\u0001\u0000\u0000\u0000\u0002n\u0001\u0000\u0000\u0000\u0002p"+ - "\u0001\u0000\u0000\u0000\u0002r\u0001\u0000\u0000\u0000\u0002t\u0001\u0000"+ - "\u0000\u0000\u0002v\u0001\u0000\u0000\u0000\u0002x\u0001\u0000\u0000\u0000"+ - "\u0002z\u0001\u0000\u0000\u0000\u0002|\u0001\u0000\u0000\u0000\u0002~"+ - "\u0001\u0000\u0000\u0000\u0002\u0080\u0001\u0000\u0000\u0000\u0002\u0082"+ - "\u0001\u0000\u0000\u0000\u0002\u0084\u0001\u0000\u0000\u0000\u0002\u0086"+ - "\u0001\u0000\u0000\u0000\u0002\u0088\u0001\u0000\u0000\u0000\u0002\u008a"+ - "\u0001\u0000\u0000\u0000\u0002\u008c\u0001\u0000\u0000\u0000\u0002\u008e"+ - "\u0001\u0000\u0000\u0000\u0002\u0090\u0001\u0000\u0000\u0000\u0002\u0092"+ - "\u0001\u0000\u0000\u0000\u0002\u0094\u0001\u0000\u0000\u0000\u0002\u0096"+ - "\u0001\u0000\u0000\u0000\u0002\u0098\u0001\u0000\u0000\u0000\u0002\u009a"+ - "\u0001\u0000\u0000\u0000\u0002\u009c\u0001\u0000\u0000\u0000\u0002\u009e"+ - "\u0001\u0000\u0000\u0000\u0002\u00a0\u0001\u0000\u0000\u0000\u0002\u00a2"+ - "\u0001\u0000\u0000\u0000\u0002\u00a4\u0001\u0000\u0000\u0000\u0002\u00a6"+ - "\u0001\u0000\u0000\u0000\u0002\u00a8\u0001\u0000\u0000\u0000\u0002\u00ac"+ - "\u0001\u0000\u0000\u0000\u0002\u00ae\u0001\u0000\u0000\u0000\u0002\u00b0"+ - "\u0001\u0000\u0000\u0000\u0002\u00b2\u0001\u0000\u0000\u0000\u0003\u00b4"+ - "\u0001\u0000\u0000\u0000\u0003\u00b6\u0001\u0000\u0000\u0000\u0003\u00b8"+ - "\u0001\u0000\u0000\u0000\u0003\u00ba\u0001\u0000\u0000\u0000\u0003\u00bc"+ - "\u0001\u0000\u0000\u0000\u0003\u00be\u0001\u0000\u0000\u0000\u0003\u00c0"+ - "\u0001\u0000\u0000\u0000\u0003\u00c2\u0001\u0000\u0000\u0000\u0003\u00c6"+ - "\u0001\u0000\u0000\u0000\u0003\u00c8\u0001\u0000\u0000\u0000\u0003\u00ca"+ - "\u0001\u0000\u0000\u0000\u0003\u00cc\u0001\u0000\u0000\u0000\u0003\u00ce"+ - "\u0001\u0000\u0000\u0000\u0004\u00d0\u0001\u0000\u0000\u0000\u0004\u00d2"+ - 
"\u0001\u0000\u0000\u0000\u0004\u00d4\u0001\u0000\u0000\u0000\u0004\u00da"+ - "\u0001\u0000\u0000\u0000\u0004\u00dc\u0001\u0000\u0000\u0000\u0004\u00de"+ - "\u0001\u0000\u0000\u0000\u0004\u00e0\u0001\u0000\u0000\u0000\u0005\u00e2"+ - "\u0001\u0000\u0000\u0000\u0005\u00e4\u0001\u0000\u0000\u0000\u0005\u00e6"+ - "\u0001\u0000\u0000\u0000\u0005\u00e8\u0001\u0000\u0000\u0000\u0005\u00ea"+ - "\u0001\u0000\u0000\u0000\u0005\u00ec\u0001\u0000\u0000\u0000\u0005\u00ee"+ - "\u0001\u0000\u0000\u0000\u0005\u00f0\u0001\u0000\u0000\u0000\u0005\u00f2"+ - "\u0001\u0000\u0000\u0000\u0006\u00f4\u0001\u0000\u0000\u0000\u0006\u00f6"+ - "\u0001\u0000\u0000\u0000\u0006\u00f8\u0001\u0000\u0000\u0000\u0006\u00fa"+ - "\u0001\u0000\u0000\u0000\u0006\u00fe\u0001\u0000\u0000\u0000\u0006\u0100"+ - "\u0001\u0000\u0000\u0000\u0006\u0102\u0001\u0000\u0000\u0000\u0006\u0104"+ - "\u0001\u0000\u0000\u0000\u0006\u0106\u0001\u0000\u0000\u0000\u0006\u0108"+ - "\u0001\u0000\u0000\u0000\u0007\u010a\u0001\u0000\u0000\u0000\u0007\u010c"+ - "\u0001\u0000\u0000\u0000\u0007\u010e\u0001\u0000\u0000\u0000\u0007\u0110"+ - "\u0001\u0000\u0000\u0000\u0007\u0112\u0001\u0000\u0000\u0000\u0007\u0114"+ - "\u0001\u0000\u0000\u0000\u0007\u0116\u0001\u0000\u0000\u0000\u0007\u0118"+ - "\u0001\u0000\u0000\u0000\u0007\u011a\u0001\u0000\u0000\u0000\u0007\u011c"+ - "\u0001\u0000\u0000\u0000\b\u011e\u0001\u0000\u0000\u0000\b\u0120\u0001"+ + "\u0000\u00008\u0001\u0000\u0000\u0000\u0000:\u0001\u0000\u0000\u0000\u0000"+ + ">\u0001\u0000\u0000\u0000\u0001@\u0001\u0000\u0000\u0000\u0001B\u0001"+ + "\u0000\u0000\u0000\u0001D\u0001\u0000\u0000\u0000\u0001F\u0001\u0000\u0000"+ + "\u0000\u0001H\u0001\u0000\u0000\u0000\u0002J\u0001\u0000\u0000\u0000\u0002"+ + "`\u0001\u0000\u0000\u0000\u0002b\u0001\u0000\u0000\u0000\u0002d\u0001"+ + "\u0000\u0000\u0000\u0002f\u0001\u0000\u0000\u0000\u0002h\u0001\u0000\u0000"+ + "\u0000\u0002j\u0001\u0000\u0000\u0000\u0002l\u0001\u0000\u0000\u0000\u0002"+ + 
"n\u0001\u0000\u0000\u0000\u0002p\u0001\u0000\u0000\u0000\u0002r\u0001"+ + "\u0000\u0000\u0000\u0002t\u0001\u0000\u0000\u0000\u0002v\u0001\u0000\u0000"+ + "\u0000\u0002x\u0001\u0000\u0000\u0000\u0002z\u0001\u0000\u0000\u0000\u0002"+ + "|\u0001\u0000\u0000\u0000\u0002~\u0001\u0000\u0000\u0000\u0002\u0080\u0001"+ + "\u0000\u0000\u0000\u0002\u0082\u0001\u0000\u0000\u0000\u0002\u0084\u0001"+ + "\u0000\u0000\u0000\u0002\u0086\u0001\u0000\u0000\u0000\u0002\u0088\u0001"+ + "\u0000\u0000\u0000\u0002\u008a\u0001\u0000\u0000\u0000\u0002\u008c\u0001"+ + "\u0000\u0000\u0000\u0002\u008e\u0001\u0000\u0000\u0000\u0002\u0090\u0001"+ + "\u0000\u0000\u0000\u0002\u0092\u0001\u0000\u0000\u0000\u0002\u0094\u0001"+ + "\u0000\u0000\u0000\u0002\u0096\u0001\u0000\u0000\u0000\u0002\u0098\u0001"+ + "\u0000\u0000\u0000\u0002\u009a\u0001\u0000\u0000\u0000\u0002\u009c\u0001"+ + "\u0000\u0000\u0000\u0002\u009e\u0001\u0000\u0000\u0000\u0002\u00a0\u0001"+ + "\u0000\u0000\u0000\u0002\u00a2\u0001\u0000\u0000\u0000\u0002\u00a4\u0001"+ + "\u0000\u0000\u0000\u0002\u00a6\u0001\u0000\u0000\u0000\u0002\u00a8\u0001"+ + "\u0000\u0000\u0000\u0002\u00aa\u0001\u0000\u0000\u0000\u0002\u00ac\u0001"+ + "\u0000\u0000\u0000\u0002\u00ae\u0001\u0000\u0000\u0000\u0002\u00b0\u0001"+ + "\u0000\u0000\u0000\u0002\u00b4\u0001\u0000\u0000\u0000\u0002\u00b6\u0001"+ + "\u0000\u0000\u0000\u0002\u00b8\u0001\u0000\u0000\u0000\u0002\u00ba\u0001"+ + "\u0000\u0000\u0000\u0003\u00bc\u0001\u0000\u0000\u0000\u0003\u00be\u0001"+ + "\u0000\u0000\u0000\u0003\u00c0\u0001\u0000\u0000\u0000\u0003\u00c2\u0001"+ + "\u0000\u0000\u0000\u0003\u00c4\u0001\u0000\u0000\u0000\u0003\u00c6\u0001"+ + "\u0000\u0000\u0000\u0003\u00c8\u0001\u0000\u0000\u0000\u0003\u00ca\u0001"+ + "\u0000\u0000\u0000\u0003\u00cc\u0001\u0000\u0000\u0000\u0003\u00ce\u0001"+ + "\u0000\u0000\u0000\u0003\u00d0\u0001\u0000\u0000\u0000\u0003\u00d2\u0001"+ + "\u0000\u0000\u0000\u0004\u00d4\u0001\u0000\u0000\u0000\u0004\u00d6\u0001"+ + 
"\u0000\u0000\u0000\u0004\u00d8\u0001\u0000\u0000\u0000\u0004\u00de\u0001"+ + "\u0000\u0000\u0000\u0004\u00e0\u0001\u0000\u0000\u0000\u0004\u00e2\u0001"+ + "\u0000\u0000\u0000\u0004\u00e4\u0001\u0000\u0000\u0000\u0005\u00e6\u0001"+ + "\u0000\u0000\u0000\u0005\u00e8\u0001\u0000\u0000\u0000\u0005\u00ea\u0001"+ + "\u0000\u0000\u0000\u0005\u00ec\u0001\u0000\u0000\u0000\u0005\u00ee\u0001"+ + "\u0000\u0000\u0000\u0005\u00f0\u0001\u0000\u0000\u0000\u0005\u00f2\u0001"+ + "\u0000\u0000\u0000\u0005\u00f4\u0001\u0000\u0000\u0000\u0005\u00f6\u0001"+ + "\u0000\u0000\u0000\u0006\u00f8\u0001\u0000\u0000\u0000\u0006\u00fa\u0001"+ + "\u0000\u0000\u0000\u0006\u00fc\u0001\u0000\u0000\u0000\u0006\u00fe\u0001"+ + "\u0000\u0000\u0000\u0006\u0102\u0001\u0000\u0000\u0000\u0006\u0104\u0001"+ + "\u0000\u0000\u0000\u0006\u0106\u0001\u0000\u0000\u0000\u0006\u0108\u0001"+ + "\u0000\u0000\u0000\u0006\u010a\u0001\u0000\u0000\u0000\u0006\u010c\u0001"+ + "\u0000\u0000\u0000\u0007\u010e\u0001\u0000\u0000\u0000\u0007\u0110\u0001"+ + "\u0000\u0000\u0000\u0007\u0112\u0001\u0000\u0000\u0000\u0007\u0114\u0001"+ + "\u0000\u0000\u0000\u0007\u0116\u0001\u0000\u0000\u0000\u0007\u0118\u0001"+ + "\u0000\u0000\u0000\u0007\u011a\u0001\u0000\u0000\u0000\u0007\u011c\u0001"+ + "\u0000\u0000\u0000\u0007\u011e\u0001\u0000\u0000\u0000\u0007\u0120\u0001"+ "\u0000\u0000\u0000\b\u0122\u0001\u0000\u0000\u0000\b\u0124\u0001\u0000"+ "\u0000\u0000\b\u0126\u0001\u0000\u0000\u0000\b\u0128\u0001\u0000\u0000"+ - "\u0000\b\u012a\u0001\u0000\u0000\u0000\t\u012c\u0001\u0000\u0000\u0000"+ - "\t\u012e\u0001\u0000\u0000\u0000\t\u0130\u0001\u0000\u0000\u0000\t\u0132"+ - "\u0001\u0000\u0000\u0000\t\u0134\u0001\u0000\u0000\u0000\n\u0136\u0001"+ - "\u0000\u0000\u0000\n\u0138\u0001\u0000\u0000\u0000\n\u013a\u0001\u0000"+ + "\u0000\b\u012a\u0001\u0000\u0000\u0000\b\u012c\u0001\u0000\u0000\u0000"+ + "\b\u012e\u0001\u0000\u0000\u0000\t\u0130\u0001\u0000\u0000\u0000\t\u0132"+ + 
"\u0001\u0000\u0000\u0000\t\u0134\u0001\u0000\u0000\u0000\t\u0136\u0001"+ + "\u0000\u0000\u0000\t\u0138\u0001\u0000\u0000\u0000\n\u013a\u0001\u0000"+ "\u0000\u0000\n\u013c\u0001\u0000\u0000\u0000\n\u013e\u0001\u0000\u0000"+ - "\u0000\u000b\u0140\u0001\u0000\u0000\u0000\u000b\u0142\u0001\u0000\u0000"+ - "\u0000\u000b\u0144\u0001\u0000\u0000\u0000\u000b\u0146\u0001\u0000\u0000"+ - "\u0000\u000b\u0148\u0001\u0000\u0000\u0000\u000b\u014a\u0001\u0000\u0000"+ - "\u0000\f\u014c\u0001\u0000\u0000\u0000\u000e\u0156\u0001\u0000\u0000\u0000"+ - "\u0010\u015d\u0001\u0000\u0000\u0000\u0012\u0166\u0001\u0000\u0000\u0000"+ - "\u0014\u016d\u0001\u0000\u0000\u0000\u0016\u0177\u0001\u0000\u0000\u0000"+ - "\u0018\u017e\u0001\u0000\u0000\u0000\u001a\u0185\u0001\u0000\u0000\u0000"+ - "\u001c\u0193\u0001\u0000\u0000\u0000\u001e\u019a\u0001\u0000\u0000\u0000"+ - " \u01a2\u0001\u0000\u0000\u0000\"\u01a9\u0001\u0000\u0000\u0000$\u01b5"+ - "\u0001\u0000\u0000\u0000&\u01be\u0001\u0000\u0000\u0000(\u01c4\u0001\u0000"+ - "\u0000\u0000*\u01cb\u0001\u0000\u0000\u0000,\u01d2\u0001\u0000\u0000\u0000"+ - ".\u01da\u0001\u0000\u0000\u00000\u01e3\u0001\u0000\u0000\u00002\u01e9"+ - "\u0001\u0000\u0000\u00004\u01fa\u0001\u0000\u0000\u00006\u020a\u0001\u0000"+ - "\u0000\u00008\u0210\u0001\u0000\u0000\u0000:\u0215\u0001\u0000\u0000\u0000"+ - "<\u021a\u0001\u0000\u0000\u0000>\u021e\u0001\u0000\u0000\u0000@\u0222"+ - "\u0001\u0000\u0000\u0000B\u0226\u0001\u0000\u0000\u0000D\u022a\u0001\u0000"+ - "\u0000\u0000F\u022c\u0001\u0000\u0000\u0000H\u022e\u0001\u0000\u0000\u0000"+ - "J\u0231\u0001\u0000\u0000\u0000L\u0233\u0001\u0000\u0000\u0000N\u023c"+ - "\u0001\u0000\u0000\u0000P\u023e\u0001\u0000\u0000\u0000R\u0243\u0001\u0000"+ - "\u0000\u0000T\u0245\u0001\u0000\u0000\u0000V\u024a\u0001\u0000\u0000\u0000"+ - "X\u0269\u0001\u0000\u0000\u0000Z\u026c\u0001\u0000\u0000\u0000\\\u029a"+ - "\u0001\u0000\u0000\u0000^\u029c\u0001\u0000\u0000\u0000`\u029f\u0001\u0000"+ - 
"\u0000\u0000b\u02a3\u0001\u0000\u0000\u0000d\u02a7\u0001\u0000\u0000\u0000"+ - "f\u02a9\u0001\u0000\u0000\u0000h\u02ac\u0001\u0000\u0000\u0000j\u02ae"+ - "\u0001\u0000\u0000\u0000l\u02b3\u0001\u0000\u0000\u0000n\u02b5\u0001\u0000"+ - "\u0000\u0000p\u02bb\u0001\u0000\u0000\u0000r\u02c1\u0001\u0000\u0000\u0000"+ - "t\u02c6\u0001\u0000\u0000\u0000v\u02c8\u0001\u0000\u0000\u0000x\u02cb"+ - "\u0001\u0000\u0000\u0000z\u02ce\u0001\u0000\u0000\u0000|\u02d3\u0001\u0000"+ - "\u0000\u0000~\u02d7\u0001\u0000\u0000\u0000\u0080\u02dc\u0001\u0000\u0000"+ - "\u0000\u0082\u02e2\u0001\u0000\u0000\u0000\u0084\u02e5\u0001\u0000\u0000"+ - "\u0000\u0086\u02e7\u0001\u0000\u0000\u0000\u0088\u02ed\u0001\u0000\u0000"+ - "\u0000\u008a\u02ef\u0001\u0000\u0000\u0000\u008c\u02f4\u0001\u0000\u0000"+ - "\u0000\u008e\u02f7\u0001\u0000\u0000\u0000\u0090\u02fa\u0001\u0000\u0000"+ - "\u0000\u0092\u02fd\u0001\u0000\u0000\u0000\u0094\u02ff\u0001\u0000\u0000"+ - "\u0000\u0096\u0302\u0001\u0000\u0000\u0000\u0098\u0304\u0001\u0000\u0000"+ - "\u0000\u009a\u0307\u0001\u0000\u0000\u0000\u009c\u0309\u0001\u0000\u0000"+ - "\u0000\u009e\u030b\u0001\u0000\u0000\u0000\u00a0\u030d\u0001\u0000\u0000"+ - "\u0000\u00a2\u030f\u0001\u0000\u0000\u0000\u00a4\u0311\u0001\u0000\u0000"+ - "\u0000\u00a6\u0316\u0001\u0000\u0000\u0000\u00a8\u032b\u0001\u0000\u0000"+ - "\u0000\u00aa\u032d\u0001\u0000\u0000\u0000\u00ac\u0335\u0001\u0000\u0000"+ - "\u0000\u00ae\u0337\u0001\u0000\u0000\u0000\u00b0\u033b\u0001\u0000\u0000"+ - "\u0000\u00b2\u033f\u0001\u0000\u0000\u0000\u00b4\u0343\u0001\u0000\u0000"+ - "\u0000\u00b6\u0348\u0001\u0000\u0000\u0000\u00b8\u034c\u0001\u0000\u0000"+ - "\u0000\u00ba\u0350\u0001\u0000\u0000\u0000\u00bc\u0354\u0001\u0000\u0000"+ - "\u0000\u00be\u0358\u0001\u0000\u0000\u0000\u00c0\u035c\u0001\u0000\u0000"+ - "\u0000\u00c2\u0364\u0001\u0000\u0000\u0000\u00c4\u0370\u0001\u0000\u0000"+ - "\u0000\u00c6\u0373\u0001\u0000\u0000\u0000\u00c8\u0377\u0001\u0000\u0000"+ - 
"\u0000\u00ca\u037b\u0001\u0000\u0000\u0000\u00cc\u037f\u0001\u0000\u0000"+ - "\u0000\u00ce\u0383\u0001\u0000\u0000\u0000\u00d0\u0387\u0001\u0000\u0000"+ - "\u0000\u00d2\u038c\u0001\u0000\u0000\u0000\u00d4\u0390\u0001\u0000\u0000"+ - "\u0000\u00d6\u0398\u0001\u0000\u0000\u0000\u00d8\u03ad\u0001\u0000\u0000"+ - "\u0000\u00da\u03b1\u0001\u0000\u0000\u0000\u00dc\u03b5\u0001\u0000\u0000"+ - "\u0000\u00de\u03b9\u0001\u0000\u0000\u0000\u00e0\u03bd\u0001\u0000\u0000"+ - "\u0000\u00e2\u03c1\u0001\u0000\u0000\u0000\u00e4\u03c6\u0001\u0000\u0000"+ - "\u0000\u00e6\u03ca\u0001\u0000\u0000\u0000\u00e8\u03ce\u0001\u0000\u0000"+ - "\u0000\u00ea\u03d2\u0001\u0000\u0000\u0000\u00ec\u03d5\u0001\u0000\u0000"+ - "\u0000\u00ee\u03d9\u0001\u0000\u0000\u0000\u00f0\u03dd\u0001\u0000\u0000"+ - "\u0000\u00f2\u03e1\u0001\u0000\u0000\u0000\u00f4\u03e5\u0001\u0000\u0000"+ - "\u0000\u00f6\u03ea\u0001\u0000\u0000\u0000\u00f8\u03ef\u0001\u0000\u0000"+ - "\u0000\u00fa\u03f4\u0001\u0000\u0000\u0000\u00fc\u03fb\u0001\u0000\u0000"+ - "\u0000\u00fe\u0404\u0001\u0000\u0000\u0000\u0100\u040b\u0001\u0000\u0000"+ - "\u0000\u0102\u040f\u0001\u0000\u0000\u0000\u0104\u0413\u0001\u0000\u0000"+ - "\u0000\u0106\u0417\u0001\u0000\u0000\u0000\u0108\u041b\u0001\u0000\u0000"+ - "\u0000\u010a\u041f\u0001\u0000\u0000\u0000\u010c\u0425\u0001\u0000\u0000"+ - "\u0000\u010e\u0429\u0001\u0000\u0000\u0000\u0110\u042d\u0001\u0000\u0000"+ - "\u0000\u0112\u0431\u0001\u0000\u0000\u0000\u0114\u0435\u0001\u0000\u0000"+ - "\u0000\u0116\u0439\u0001\u0000\u0000\u0000\u0118\u043d\u0001\u0000\u0000"+ - "\u0000\u011a\u0441\u0001\u0000\u0000\u0000\u011c\u0445\u0001\u0000\u0000"+ - "\u0000\u011e\u0449\u0001\u0000\u0000\u0000\u0120\u044e\u0001\u0000\u0000"+ - "\u0000\u0122\u0452\u0001\u0000\u0000\u0000\u0124\u0456\u0001\u0000\u0000"+ - "\u0000\u0126\u045a\u0001\u0000\u0000\u0000\u0128\u045e\u0001\u0000\u0000"+ - "\u0000\u012a\u0462\u0001\u0000\u0000\u0000\u012c\u0466\u0001\u0000\u0000"+ - 
"\u0000\u012e\u046b\u0001\u0000\u0000\u0000\u0130\u0470\u0001\u0000\u0000"+ - "\u0000\u0132\u0474\u0001\u0000\u0000\u0000\u0134\u0478\u0001\u0000\u0000"+ - "\u0000\u0136\u047c\u0001\u0000\u0000\u0000\u0138\u0481\u0001\u0000\u0000"+ - "\u0000\u013a\u048b\u0001\u0000\u0000\u0000\u013c\u048f\u0001\u0000\u0000"+ - "\u0000\u013e\u0493\u0001\u0000\u0000\u0000\u0140\u0497\u0001\u0000\u0000"+ - "\u0000\u0142\u049c\u0001\u0000\u0000\u0000\u0144\u04a3\u0001\u0000\u0000"+ - "\u0000\u0146\u04a7\u0001\u0000\u0000\u0000\u0148\u04ab\u0001\u0000\u0000"+ - "\u0000\u014a\u04af\u0001\u0000\u0000\u0000\u014c\u014d\u0005d\u0000\u0000"+ - "\u014d\u014e\u0005i\u0000\u0000\u014e\u014f\u0005s\u0000\u0000\u014f\u0150"+ - "\u0005s\u0000\u0000\u0150\u0151\u0005e\u0000\u0000\u0151\u0152\u0005c"+ - "\u0000\u0000\u0152\u0153\u0005t\u0000\u0000\u0153\u0154\u0001\u0000\u0000"+ - "\u0000\u0154\u0155\u0006\u0000\u0000\u0000\u0155\r\u0001\u0000\u0000\u0000"+ - "\u0156\u0157\u0005d\u0000\u0000\u0157\u0158\u0005r\u0000\u0000\u0158\u0159"+ - "\u0005o\u0000\u0000\u0159\u015a\u0005p\u0000\u0000\u015a\u015b\u0001\u0000"+ - "\u0000\u0000\u015b\u015c\u0006\u0001\u0001\u0000\u015c\u000f\u0001\u0000"+ - "\u0000\u0000\u015d\u015e\u0005e\u0000\u0000\u015e\u015f\u0005n\u0000\u0000"+ - "\u015f\u0160\u0005r\u0000\u0000\u0160\u0161\u0005i\u0000\u0000\u0161\u0162"+ - "\u0005c\u0000\u0000\u0162\u0163\u0005h\u0000\u0000\u0163\u0164\u0001\u0000"+ - "\u0000\u0000\u0164\u0165\u0006\u0002\u0002\u0000\u0165\u0011\u0001\u0000"+ - "\u0000\u0000\u0166\u0167\u0005e\u0000\u0000\u0167\u0168\u0005v\u0000\u0000"+ - "\u0168\u0169\u0005a\u0000\u0000\u0169\u016a\u0005l\u0000\u0000\u016a\u016b"+ - "\u0001\u0000\u0000\u0000\u016b\u016c\u0006\u0003\u0000\u0000\u016c\u0013"+ - "\u0001\u0000\u0000\u0000\u016d\u016e\u0005e\u0000\u0000\u016e\u016f\u0005"+ - "x\u0000\u0000\u016f\u0170\u0005p\u0000\u0000\u0170\u0171\u0005l\u0000"+ - "\u0000\u0171\u0172\u0005a\u0000\u0000\u0172\u0173\u0005i\u0000\u0000\u0173"+ - 
"\u0174\u0005n\u0000\u0000\u0174\u0175\u0001\u0000\u0000\u0000\u0175\u0176"+ - "\u0006\u0004\u0003\u0000\u0176\u0015\u0001\u0000\u0000\u0000\u0177\u0178"+ - "\u0005f\u0000\u0000\u0178\u0179\u0005r\u0000\u0000\u0179\u017a\u0005o"+ - "\u0000\u0000\u017a\u017b\u0005m\u0000\u0000\u017b\u017c\u0001\u0000\u0000"+ - "\u0000\u017c\u017d\u0006\u0005\u0004\u0000\u017d\u0017\u0001\u0000\u0000"+ - "\u0000\u017e\u017f\u0005g\u0000\u0000\u017f\u0180\u0005r\u0000\u0000\u0180"+ - "\u0181\u0005o\u0000\u0000\u0181\u0182\u0005k\u0000\u0000\u0182\u0183\u0001"+ - "\u0000\u0000\u0000\u0183\u0184\u0006\u0006\u0000\u0000\u0184\u0019\u0001"+ - "\u0000\u0000\u0000\u0185\u0186\u0005i\u0000\u0000\u0186\u0187\u0005n\u0000"+ - "\u0000\u0187\u0188\u0005l\u0000\u0000\u0188\u0189\u0005i\u0000\u0000\u0189"+ - "\u018a\u0005n\u0000\u0000\u018a\u018b\u0005e\u0000\u0000\u018b\u018c\u0005"+ - "s\u0000\u0000\u018c\u018d\u0005t\u0000\u0000\u018d\u018e\u0005a\u0000"+ - "\u0000\u018e\u018f\u0005t\u0000\u0000\u018f\u0190\u0005s\u0000\u0000\u0190"+ - "\u0191\u0001\u0000\u0000\u0000\u0191\u0192\u0006\u0007\u0000\u0000\u0192"+ - "\u001b\u0001\u0000\u0000\u0000\u0193\u0194\u0005k\u0000\u0000\u0194\u0195"+ - "\u0005e\u0000\u0000\u0195\u0196\u0005e\u0000\u0000\u0196\u0197\u0005p"+ - "\u0000\u0000\u0197\u0198\u0001\u0000\u0000\u0000\u0198\u0199\u0006\b\u0001"+ - "\u0000\u0199\u001d\u0001\u0000\u0000\u0000\u019a\u019b\u0005l\u0000\u0000"+ - "\u019b\u019c\u0005i\u0000\u0000\u019c\u019d\u0005m\u0000\u0000\u019d\u019e"+ - "\u0005i\u0000\u0000\u019e\u019f\u0005t\u0000\u0000\u019f\u01a0\u0001\u0000"+ - "\u0000\u0000\u01a0\u01a1\u0006\t\u0000\u0000\u01a1\u001f\u0001\u0000\u0000"+ - "\u0000\u01a2\u01a3\u0005m\u0000\u0000\u01a3\u01a4\u0005e\u0000\u0000\u01a4"+ - "\u01a5\u0005t\u0000\u0000\u01a5\u01a6\u0005a\u0000\u0000\u01a6\u01a7\u0001"+ - "\u0000\u0000\u0000\u01a7\u01a8\u0006\n\u0005\u0000\u01a8!\u0001\u0000"+ - "\u0000\u0000\u01a9\u01aa\u0005m\u0000\u0000\u01aa\u01ab\u0005v\u0000\u0000"+ - 
"\u01ab\u01ac\u0005_\u0000\u0000\u01ac\u01ad\u0005e\u0000\u0000\u01ad\u01ae"+ - "\u0005x\u0000\u0000\u01ae\u01af\u0005p\u0000\u0000\u01af\u01b0\u0005a"+ - "\u0000\u0000\u01b0\u01b1\u0005n\u0000\u0000\u01b1\u01b2\u0005d\u0000\u0000"+ - "\u01b2\u01b3\u0001\u0000\u0000\u0000\u01b3\u01b4\u0006\u000b\u0006\u0000"+ - "\u01b4#\u0001\u0000\u0000\u0000\u01b5\u01b6\u0005r\u0000\u0000\u01b6\u01b7"+ - "\u0005e\u0000\u0000\u01b7\u01b8\u0005n\u0000\u0000\u01b8\u01b9\u0005a"+ - "\u0000\u0000\u01b9\u01ba\u0005m\u0000\u0000\u01ba\u01bb\u0005e\u0000\u0000"+ - "\u01bb\u01bc\u0001\u0000\u0000\u0000\u01bc\u01bd\u0006\f\u0007\u0000\u01bd"+ - "%\u0001\u0000\u0000\u0000\u01be\u01bf\u0005r\u0000\u0000\u01bf\u01c0\u0005"+ - "o\u0000\u0000\u01c0\u01c1\u0005w\u0000\u0000\u01c1\u01c2\u0001\u0000\u0000"+ - "\u0000\u01c2\u01c3\u0006\r\u0000\u0000\u01c3\'\u0001\u0000\u0000\u0000"+ - "\u01c4\u01c5\u0005s\u0000\u0000\u01c5\u01c6\u0005h\u0000\u0000\u01c6\u01c7"+ - "\u0005o\u0000\u0000\u01c7\u01c8\u0005w\u0000\u0000\u01c8\u01c9\u0001\u0000"+ - "\u0000\u0000\u01c9\u01ca\u0006\u000e\b\u0000\u01ca)\u0001\u0000\u0000"+ - "\u0000\u01cb\u01cc\u0005s\u0000\u0000\u01cc\u01cd\u0005o\u0000\u0000\u01cd"+ - "\u01ce\u0005r\u0000\u0000\u01ce\u01cf\u0005t\u0000\u0000\u01cf\u01d0\u0001"+ - "\u0000\u0000\u0000\u01d0\u01d1\u0006\u000f\u0000\u0000\u01d1+\u0001\u0000"+ - "\u0000\u0000\u01d2\u01d3\u0005s\u0000\u0000\u01d3\u01d4\u0005t\u0000\u0000"+ - "\u01d4\u01d5\u0005a\u0000\u0000\u01d5\u01d6\u0005t\u0000\u0000\u01d6\u01d7"+ - "\u0005s\u0000\u0000\u01d7\u01d8\u0001\u0000\u0000\u0000\u01d8\u01d9\u0006"+ - "\u0010\u0000\u0000\u01d9-\u0001\u0000\u0000\u0000\u01da\u01db\u0005w\u0000"+ - "\u0000\u01db\u01dc\u0005h\u0000\u0000\u01dc\u01dd\u0005e\u0000\u0000\u01dd"+ - "\u01de\u0005r\u0000\u0000\u01de\u01df\u0005e\u0000\u0000\u01df\u01e0\u0001"+ - "\u0000\u0000\u0000\u01e0\u01e1\u0006\u0011\u0000\u0000\u01e1/\u0001\u0000"+ - "\u0000\u0000\u01e2\u01e4\b\u0000\u0000\u0000\u01e3\u01e2\u0001\u0000\u0000"+ - 
"\u0000\u01e4\u01e5\u0001\u0000\u0000\u0000\u01e5\u01e3\u0001\u0000\u0000"+ - "\u0000\u01e5\u01e6\u0001\u0000\u0000\u0000\u01e6\u01e7\u0001\u0000\u0000"+ - "\u0000\u01e7\u01e8\u0006\u0012\u0000\u0000\u01e81\u0001\u0000\u0000\u0000"+ - "\u01e9\u01ea\u0005/\u0000\u0000\u01ea\u01eb\u0005/\u0000\u0000\u01eb\u01ef"+ - "\u0001\u0000\u0000\u0000\u01ec\u01ee\b\u0001\u0000\u0000\u01ed\u01ec\u0001"+ - "\u0000\u0000\u0000\u01ee\u01f1\u0001\u0000\u0000\u0000\u01ef\u01ed\u0001"+ - "\u0000\u0000\u0000\u01ef\u01f0\u0001\u0000\u0000\u0000\u01f0\u01f3\u0001"+ - "\u0000\u0000\u0000\u01f1\u01ef\u0001\u0000\u0000\u0000\u01f2\u01f4\u0005"+ - "\r\u0000\u0000\u01f3\u01f2\u0001\u0000\u0000\u0000\u01f3\u01f4\u0001\u0000"+ - "\u0000\u0000\u01f4\u01f6\u0001\u0000\u0000\u0000\u01f5\u01f7\u0005\n\u0000"+ - "\u0000\u01f6\u01f5\u0001\u0000\u0000\u0000\u01f6\u01f7\u0001\u0000\u0000"+ - "\u0000\u01f7\u01f8\u0001\u0000\u0000\u0000\u01f8\u01f9\u0006\u0013\t\u0000"+ - "\u01f93\u0001\u0000\u0000\u0000\u01fa\u01fb\u0005/\u0000\u0000\u01fb\u01fc"+ - "\u0005*\u0000\u0000\u01fc\u0201\u0001\u0000\u0000\u0000\u01fd\u0200\u0003"+ - "4\u0014\u0000\u01fe\u0200\t\u0000\u0000\u0000\u01ff\u01fd\u0001\u0000"+ - "\u0000\u0000\u01ff\u01fe\u0001\u0000\u0000\u0000\u0200\u0203\u0001\u0000"+ - "\u0000\u0000\u0201\u0202\u0001\u0000\u0000\u0000\u0201\u01ff\u0001\u0000"+ - "\u0000\u0000\u0202\u0204\u0001\u0000\u0000\u0000\u0203\u0201\u0001\u0000"+ - "\u0000\u0000\u0204\u0205\u0005*\u0000\u0000\u0205\u0206\u0005/\u0000\u0000"+ - "\u0206\u0207\u0001\u0000\u0000\u0000\u0207\u0208\u0006\u0014\t\u0000\u0208"+ - "5\u0001\u0000\u0000\u0000\u0209\u020b\u0007\u0002\u0000\u0000\u020a\u0209"+ - "\u0001\u0000\u0000\u0000\u020b\u020c\u0001\u0000\u0000\u0000\u020c\u020a"+ - "\u0001\u0000\u0000\u0000\u020c\u020d\u0001\u0000\u0000\u0000\u020d\u020e"+ - "\u0001\u0000\u0000\u0000\u020e\u020f\u0006\u0015\t\u0000\u020f7\u0001"+ - "\u0000\u0000\u0000\u0210\u0211\u0003\u00a4L\u0000\u0211\u0212\u0001\u0000"+ - 
"\u0000\u0000\u0212\u0213\u0006\u0016\n\u0000\u0213\u0214\u0006\u0016\u000b"+ - "\u0000\u02149\u0001\u0000\u0000\u0000\u0215\u0216\u0003B\u001b\u0000\u0216"+ - "\u0217\u0001\u0000\u0000\u0000\u0217\u0218\u0006\u0017\f\u0000\u0218\u0219"+ - "\u0006\u0017\r\u0000\u0219;\u0001\u0000\u0000\u0000\u021a\u021b\u0003"+ - "6\u0015\u0000\u021b\u021c\u0001\u0000\u0000\u0000\u021c\u021d\u0006\u0018"+ - "\t\u0000\u021d=\u0001\u0000\u0000\u0000\u021e\u021f\u00032\u0013\u0000"+ - "\u021f\u0220\u0001\u0000\u0000\u0000\u0220\u0221\u0006\u0019\t\u0000\u0221"+ - "?\u0001\u0000\u0000\u0000\u0222\u0223\u00034\u0014\u0000\u0223\u0224\u0001"+ - "\u0000\u0000\u0000\u0224\u0225\u0006\u001a\t\u0000\u0225A\u0001\u0000"+ - "\u0000\u0000\u0226\u0227\u0005|\u0000\u0000\u0227\u0228\u0001\u0000\u0000"+ - "\u0000\u0228\u0229\u0006\u001b\r\u0000\u0229C\u0001\u0000\u0000\u0000"+ - "\u022a\u022b\u0007\u0003\u0000\u0000\u022bE\u0001\u0000\u0000\u0000\u022c"+ - "\u022d\u0007\u0004\u0000\u0000\u022dG\u0001\u0000\u0000\u0000\u022e\u022f"+ - "\u0005\\\u0000\u0000\u022f\u0230\u0007\u0005\u0000\u0000\u0230I\u0001"+ - "\u0000\u0000\u0000\u0231\u0232\b\u0006\u0000\u0000\u0232K\u0001\u0000"+ - "\u0000\u0000\u0233\u0235\u0007\u0007\u0000\u0000\u0234\u0236\u0007\b\u0000"+ - "\u0000\u0235\u0234\u0001\u0000\u0000\u0000\u0235\u0236\u0001\u0000\u0000"+ - "\u0000\u0236\u0238\u0001\u0000\u0000\u0000\u0237\u0239\u0003D\u001c\u0000"+ - "\u0238\u0237\u0001\u0000\u0000\u0000\u0239\u023a\u0001\u0000\u0000\u0000"+ - "\u023a\u0238\u0001\u0000\u0000\u0000\u023a\u023b\u0001\u0000\u0000\u0000"+ - "\u023bM\u0001\u0000\u0000\u0000\u023c\u023d\u0005@\u0000\u0000\u023dO"+ - "\u0001\u0000\u0000\u0000\u023e\u023f\u0005`\u0000\u0000\u023fQ\u0001\u0000"+ - "\u0000\u0000\u0240\u0244\b\t\u0000\u0000\u0241\u0242\u0005`\u0000\u0000"+ - "\u0242\u0244\u0005`\u0000\u0000\u0243\u0240\u0001\u0000\u0000\u0000\u0243"+ - "\u0241\u0001\u0000\u0000\u0000\u0244S\u0001\u0000\u0000\u0000\u0245\u0246"+ - 
"\u0005_\u0000\u0000\u0246U\u0001\u0000\u0000\u0000\u0247\u024b\u0003F"+ - "\u001d\u0000\u0248\u024b\u0003D\u001c\u0000\u0249\u024b\u0003T$\u0000"+ - "\u024a\u0247\u0001\u0000\u0000\u0000\u024a\u0248\u0001\u0000\u0000\u0000"+ - "\u024a\u0249\u0001\u0000\u0000\u0000\u024bW\u0001\u0000\u0000\u0000\u024c"+ - "\u0251\u0005\"\u0000\u0000\u024d\u0250\u0003H\u001e\u0000\u024e\u0250"+ - "\u0003J\u001f\u0000\u024f\u024d\u0001\u0000\u0000\u0000\u024f\u024e\u0001"+ - "\u0000\u0000\u0000\u0250\u0253\u0001\u0000\u0000\u0000\u0251\u024f\u0001"+ - "\u0000\u0000\u0000\u0251\u0252\u0001\u0000\u0000\u0000\u0252\u0254\u0001"+ - "\u0000\u0000\u0000\u0253\u0251\u0001\u0000\u0000\u0000\u0254\u026a\u0005"+ - "\"\u0000\u0000\u0255\u0256\u0005\"\u0000\u0000\u0256\u0257\u0005\"\u0000"+ - "\u0000\u0257\u0258\u0005\"\u0000\u0000\u0258\u025c\u0001\u0000\u0000\u0000"+ - "\u0259\u025b\b\u0001\u0000\u0000\u025a\u0259\u0001\u0000\u0000\u0000\u025b"+ - "\u025e\u0001\u0000\u0000\u0000\u025c\u025d\u0001\u0000\u0000\u0000\u025c"+ - "\u025a\u0001\u0000\u0000\u0000\u025d\u025f\u0001\u0000\u0000\u0000\u025e"+ - "\u025c\u0001\u0000\u0000\u0000\u025f\u0260\u0005\"\u0000\u0000\u0260\u0261"+ - "\u0005\"\u0000\u0000\u0261\u0262\u0005\"\u0000\u0000\u0262\u0264\u0001"+ - "\u0000\u0000\u0000\u0263\u0265\u0005\"\u0000\u0000\u0264\u0263\u0001\u0000"+ - "\u0000\u0000\u0264\u0265\u0001\u0000\u0000\u0000\u0265\u0267\u0001\u0000"+ - "\u0000\u0000\u0266\u0268\u0005\"\u0000\u0000\u0267\u0266\u0001\u0000\u0000"+ - "\u0000\u0267\u0268\u0001\u0000\u0000\u0000\u0268\u026a\u0001\u0000\u0000"+ - "\u0000\u0269\u024c\u0001\u0000\u0000\u0000\u0269\u0255\u0001\u0000\u0000"+ - "\u0000\u026aY\u0001\u0000\u0000\u0000\u026b\u026d\u0003D\u001c\u0000\u026c"+ - "\u026b\u0001\u0000\u0000\u0000\u026d\u026e\u0001\u0000\u0000\u0000\u026e"+ - "\u026c\u0001\u0000\u0000\u0000\u026e\u026f\u0001\u0000\u0000\u0000\u026f"+ - "[\u0001\u0000\u0000\u0000\u0270\u0272\u0003D\u001c\u0000\u0271\u0270\u0001"+ - 
"\u0000\u0000\u0000\u0272\u0273\u0001\u0000\u0000\u0000\u0273\u0271\u0001"+ - "\u0000\u0000\u0000\u0273\u0274\u0001\u0000\u0000\u0000\u0274\u0275\u0001"+ - "\u0000\u0000\u0000\u0275\u0279\u0003l0\u0000\u0276\u0278\u0003D\u001c"+ - "\u0000\u0277\u0276\u0001\u0000\u0000\u0000\u0278\u027b\u0001\u0000\u0000"+ - "\u0000\u0279\u0277\u0001\u0000\u0000\u0000\u0279\u027a\u0001\u0000\u0000"+ - "\u0000\u027a\u029b\u0001\u0000\u0000\u0000\u027b\u0279\u0001\u0000\u0000"+ - "\u0000\u027c\u027e\u0003l0\u0000\u027d\u027f\u0003D\u001c\u0000\u027e"+ - "\u027d\u0001\u0000\u0000\u0000\u027f\u0280\u0001\u0000\u0000\u0000\u0280"+ - "\u027e\u0001\u0000\u0000\u0000\u0280\u0281\u0001\u0000\u0000\u0000\u0281"+ - "\u029b\u0001\u0000\u0000\u0000\u0282\u0284\u0003D\u001c\u0000\u0283\u0282"+ - "\u0001\u0000\u0000\u0000\u0284\u0285\u0001\u0000\u0000\u0000\u0285\u0283"+ - "\u0001\u0000\u0000\u0000\u0285\u0286\u0001\u0000\u0000\u0000\u0286\u028e"+ - "\u0001\u0000\u0000\u0000\u0287\u028b\u0003l0\u0000\u0288\u028a\u0003D"+ - "\u001c\u0000\u0289\u0288\u0001\u0000\u0000\u0000\u028a\u028d\u0001\u0000"+ - "\u0000\u0000\u028b\u0289\u0001\u0000\u0000\u0000\u028b\u028c\u0001\u0000"+ - "\u0000\u0000\u028c\u028f\u0001\u0000\u0000\u0000\u028d\u028b\u0001\u0000"+ - "\u0000\u0000\u028e\u0287\u0001\u0000\u0000\u0000\u028e\u028f\u0001\u0000"+ - "\u0000\u0000\u028f\u0290\u0001\u0000\u0000\u0000\u0290\u0291\u0003L \u0000"+ - "\u0291\u029b\u0001\u0000\u0000\u0000\u0292\u0294\u0003l0\u0000\u0293\u0295"+ - "\u0003D\u001c\u0000\u0294\u0293\u0001\u0000\u0000\u0000\u0295\u0296\u0001"+ - "\u0000\u0000\u0000\u0296\u0294\u0001\u0000\u0000\u0000\u0296\u0297\u0001"+ - "\u0000\u0000\u0000\u0297\u0298\u0001\u0000\u0000\u0000\u0298\u0299\u0003"+ - "L \u0000\u0299\u029b\u0001\u0000\u0000\u0000\u029a\u0271\u0001\u0000\u0000"+ - "\u0000\u029a\u027c\u0001\u0000\u0000\u0000\u029a\u0283\u0001\u0000\u0000"+ - "\u0000\u029a\u0292\u0001\u0000\u0000\u0000\u029b]\u0001\u0000\u0000\u0000"+ - 
"\u029c\u029d\u0005b\u0000\u0000\u029d\u029e\u0005y\u0000\u0000\u029e_"+ - "\u0001\u0000\u0000\u0000\u029f\u02a0\u0005a\u0000\u0000\u02a0\u02a1\u0005"+ - "n\u0000\u0000\u02a1\u02a2\u0005d\u0000\u0000\u02a2a\u0001\u0000\u0000"+ - "\u0000\u02a3\u02a4\u0005a\u0000\u0000\u02a4\u02a5\u0005s\u0000\u0000\u02a5"+ - "\u02a6\u0005c\u0000\u0000\u02a6c\u0001\u0000\u0000\u0000\u02a7\u02a8\u0005"+ - "=\u0000\u0000\u02a8e\u0001\u0000\u0000\u0000\u02a9\u02aa\u0005:\u0000"+ - "\u0000\u02aa\u02ab\u0005:\u0000\u0000\u02abg\u0001\u0000\u0000\u0000\u02ac"+ - "\u02ad\u0005,\u0000\u0000\u02adi\u0001\u0000\u0000\u0000\u02ae\u02af\u0005"+ - "d\u0000\u0000\u02af\u02b0\u0005e\u0000\u0000\u02b0\u02b1\u0005s\u0000"+ - "\u0000\u02b1\u02b2\u0005c\u0000\u0000\u02b2k\u0001\u0000\u0000\u0000\u02b3"+ - "\u02b4\u0005.\u0000\u0000\u02b4m\u0001\u0000\u0000\u0000\u02b5\u02b6\u0005"+ - "f\u0000\u0000\u02b6\u02b7\u0005a\u0000\u0000\u02b7\u02b8\u0005l\u0000"+ - "\u0000\u02b8\u02b9\u0005s\u0000\u0000\u02b9\u02ba\u0005e\u0000\u0000\u02ba"+ - "o\u0001\u0000\u0000\u0000\u02bb\u02bc\u0005f\u0000\u0000\u02bc\u02bd\u0005"+ - "i\u0000\u0000\u02bd\u02be\u0005r\u0000\u0000\u02be\u02bf\u0005s\u0000"+ - "\u0000\u02bf\u02c0\u0005t\u0000\u0000\u02c0q\u0001\u0000\u0000\u0000\u02c1"+ - "\u02c2\u0005l\u0000\u0000\u02c2\u02c3\u0005a\u0000\u0000\u02c3\u02c4\u0005"+ - "s\u0000\u0000\u02c4\u02c5\u0005t\u0000\u0000\u02c5s\u0001\u0000\u0000"+ - "\u0000\u02c6\u02c7\u0005(\u0000\u0000\u02c7u\u0001\u0000\u0000\u0000\u02c8"+ - "\u02c9\u0005i\u0000\u0000\u02c9\u02ca\u0005n\u0000\u0000\u02caw\u0001"+ - "\u0000\u0000\u0000\u02cb\u02cc\u0005i\u0000\u0000\u02cc\u02cd\u0005s\u0000"+ - "\u0000\u02cdy\u0001\u0000\u0000\u0000\u02ce\u02cf\u0005l\u0000\u0000\u02cf"+ - "\u02d0\u0005i\u0000\u0000\u02d0\u02d1\u0005k\u0000\u0000\u02d1\u02d2\u0005"+ - "e\u0000\u0000\u02d2{\u0001\u0000\u0000\u0000\u02d3\u02d4\u0005n\u0000"+ - "\u0000\u02d4\u02d5\u0005o\u0000\u0000\u02d5\u02d6\u0005t\u0000\u0000\u02d6"+ - 
"}\u0001\u0000\u0000\u0000\u02d7\u02d8\u0005n\u0000\u0000\u02d8\u02d9\u0005"+ - "u\u0000\u0000\u02d9\u02da\u0005l\u0000\u0000\u02da\u02db\u0005l\u0000"+ - "\u0000\u02db\u007f\u0001\u0000\u0000\u0000\u02dc\u02dd\u0005n\u0000\u0000"+ - "\u02dd\u02de\u0005u\u0000\u0000\u02de\u02df\u0005l\u0000\u0000\u02df\u02e0"+ - "\u0005l\u0000\u0000\u02e0\u02e1\u0005s\u0000\u0000\u02e1\u0081\u0001\u0000"+ - "\u0000\u0000\u02e2\u02e3\u0005o\u0000\u0000\u02e3\u02e4\u0005r\u0000\u0000"+ - "\u02e4\u0083\u0001\u0000\u0000\u0000\u02e5\u02e6\u0005?\u0000\u0000\u02e6"+ - "\u0085\u0001\u0000\u0000\u0000\u02e7\u02e8\u0005r\u0000\u0000\u02e8\u02e9"+ - "\u0005l\u0000\u0000\u02e9\u02ea\u0005i\u0000\u0000\u02ea\u02eb\u0005k"+ - "\u0000\u0000\u02eb\u02ec\u0005e\u0000\u0000\u02ec\u0087\u0001\u0000\u0000"+ - "\u0000\u02ed\u02ee\u0005)\u0000\u0000\u02ee\u0089\u0001\u0000\u0000\u0000"+ - "\u02ef\u02f0\u0005t\u0000\u0000\u02f0\u02f1\u0005r\u0000\u0000\u02f1\u02f2"+ - "\u0005u\u0000\u0000\u02f2\u02f3\u0005e\u0000\u0000\u02f3\u008b\u0001\u0000"+ - "\u0000\u0000\u02f4\u02f5\u0005=\u0000\u0000\u02f5\u02f6\u0005=\u0000\u0000"+ - "\u02f6\u008d\u0001\u0000\u0000\u0000\u02f7\u02f8\u0005=\u0000\u0000\u02f8"+ - "\u02f9\u0005~\u0000\u0000\u02f9\u008f\u0001\u0000\u0000\u0000\u02fa\u02fb"+ - "\u0005!\u0000\u0000\u02fb\u02fc\u0005=\u0000\u0000\u02fc\u0091\u0001\u0000"+ - "\u0000\u0000\u02fd\u02fe\u0005<\u0000\u0000\u02fe\u0093\u0001\u0000\u0000"+ - "\u0000\u02ff\u0300\u0005<\u0000\u0000\u0300\u0301\u0005=\u0000\u0000\u0301"+ - "\u0095\u0001\u0000\u0000\u0000\u0302\u0303\u0005>\u0000\u0000\u0303\u0097"+ - "\u0001\u0000\u0000\u0000\u0304\u0305\u0005>\u0000\u0000\u0305\u0306\u0005"+ - "=\u0000\u0000\u0306\u0099\u0001\u0000\u0000\u0000\u0307\u0308\u0005+\u0000"+ - "\u0000\u0308\u009b\u0001\u0000\u0000\u0000\u0309\u030a\u0005-\u0000\u0000"+ - "\u030a\u009d\u0001\u0000\u0000\u0000\u030b\u030c\u0005*\u0000\u0000\u030c"+ - "\u009f\u0001\u0000\u0000\u0000\u030d\u030e\u0005/\u0000\u0000\u030e\u00a1"+ - 
"\u0001\u0000\u0000\u0000\u030f\u0310\u0005%\u0000\u0000\u0310\u00a3\u0001"+ - "\u0000\u0000\u0000\u0311\u0312\u0005[\u0000\u0000\u0312\u0313\u0001\u0000"+ - "\u0000\u0000\u0313\u0314\u0006L\u0000\u0000\u0314\u0315\u0006L\u0000\u0000"+ - "\u0315\u00a5\u0001\u0000\u0000\u0000\u0316\u0317\u0005]\u0000\u0000\u0317"+ - "\u0318\u0001\u0000\u0000\u0000\u0318\u0319\u0006M\r\u0000\u0319\u031a"+ - "\u0006M\r\u0000\u031a\u00a7\u0001\u0000\u0000\u0000\u031b\u031f\u0003"+ - "F\u001d\u0000\u031c\u031e\u0003V%\u0000\u031d\u031c\u0001\u0000\u0000"+ - "\u0000\u031e\u0321\u0001\u0000\u0000\u0000\u031f\u031d\u0001\u0000\u0000"+ - "\u0000\u031f\u0320\u0001\u0000\u0000\u0000\u0320\u032c\u0001\u0000\u0000"+ - "\u0000\u0321\u031f\u0001\u0000\u0000\u0000\u0322\u0325\u0003T$\u0000\u0323"+ - "\u0325\u0003N!\u0000\u0324\u0322\u0001\u0000\u0000\u0000\u0324\u0323\u0001"+ - "\u0000\u0000\u0000\u0325\u0327\u0001\u0000\u0000\u0000\u0326\u0328\u0003"+ - "V%\u0000\u0327\u0326\u0001\u0000\u0000\u0000\u0328\u0329\u0001\u0000\u0000"+ - "\u0000\u0329\u0327\u0001\u0000\u0000\u0000\u0329\u032a\u0001\u0000\u0000"+ - "\u0000\u032a\u032c\u0001\u0000\u0000\u0000\u032b\u031b\u0001\u0000\u0000"+ - "\u0000\u032b\u0324\u0001\u0000\u0000\u0000\u032c\u00a9\u0001\u0000\u0000"+ - "\u0000\u032d\u032f\u0003P\"\u0000\u032e\u0330\u0003R#\u0000\u032f\u032e"+ - "\u0001\u0000\u0000\u0000\u0330\u0331\u0001\u0000\u0000\u0000\u0331\u032f"+ - "\u0001\u0000\u0000\u0000\u0331\u0332\u0001\u0000\u0000\u0000\u0332\u0333"+ - "\u0001\u0000\u0000\u0000\u0333\u0334\u0003P\"\u0000\u0334\u00ab\u0001"+ - "\u0000\u0000\u0000\u0335\u0336\u0003\u00aaO\u0000\u0336\u00ad\u0001\u0000"+ - "\u0000\u0000\u0337\u0338\u00032\u0013\u0000\u0338\u0339\u0001\u0000\u0000"+ - "\u0000\u0339\u033a\u0006Q\t\u0000\u033a\u00af\u0001\u0000\u0000\u0000"+ - "\u033b\u033c\u00034\u0014\u0000\u033c\u033d\u0001\u0000\u0000\u0000\u033d"+ - "\u033e\u0006R\t\u0000\u033e\u00b1\u0001\u0000\u0000\u0000\u033f\u0340"+ - 
"\u00036\u0015\u0000\u0340\u0341\u0001\u0000\u0000\u0000\u0341\u0342\u0006"+ - "S\t\u0000\u0342\u00b3\u0001\u0000\u0000\u0000\u0343\u0344\u0003B\u001b"+ - "\u0000\u0344\u0345\u0001\u0000\u0000\u0000\u0345\u0346\u0006T\f\u0000"+ - "\u0346\u0347\u0006T\r\u0000\u0347\u00b5\u0001\u0000\u0000\u0000\u0348"+ - "\u0349\u0003\u00a4L\u0000\u0349\u034a\u0001\u0000\u0000\u0000\u034a\u034b"+ - "\u0006U\n\u0000\u034b\u00b7\u0001\u0000\u0000\u0000\u034c\u034d\u0003"+ - "\u00a6M\u0000\u034d\u034e\u0001\u0000\u0000\u0000\u034e\u034f\u0006V\u000e"+ - "\u0000\u034f\u00b9\u0001\u0000\u0000\u0000\u0350\u0351\u0003h.\u0000\u0351"+ - "\u0352\u0001\u0000\u0000\u0000\u0352\u0353\u0006W\u000f\u0000\u0353\u00bb"+ - "\u0001\u0000\u0000\u0000\u0354\u0355\u0003d,\u0000\u0355\u0356\u0001\u0000"+ - "\u0000\u0000\u0356\u0357\u0006X\u0010\u0000\u0357\u00bd\u0001\u0000\u0000"+ - "\u0000\u0358\u0359\u0003X&\u0000\u0359\u035a\u0001\u0000\u0000\u0000\u035a"+ - "\u035b\u0006Y\u0011\u0000\u035b\u00bf\u0001\u0000\u0000\u0000\u035c\u035d"+ - "\u0005o\u0000\u0000\u035d\u035e\u0005p\u0000\u0000\u035e\u035f\u0005t"+ - "\u0000\u0000\u035f\u0360\u0005i\u0000\u0000\u0360\u0361\u0005o\u0000\u0000"+ - "\u0361\u0362\u0005n\u0000\u0000\u0362\u0363\u0005s\u0000\u0000\u0363\u00c1"+ - "\u0001\u0000\u0000\u0000\u0364\u0365\u0005m\u0000\u0000\u0365\u0366\u0005"+ - "e\u0000\u0000\u0366\u0367\u0005t\u0000\u0000\u0367\u0368\u0005a\u0000"+ - "\u0000\u0368\u0369\u0005d\u0000\u0000\u0369\u036a\u0005a\u0000\u0000\u036a"+ - "\u036b\u0005t\u0000\u0000\u036b\u036c\u0005a\u0000\u0000\u036c\u00c3\u0001"+ - "\u0000\u0000\u0000\u036d\u0371\b\n\u0000\u0000\u036e\u036f\u0005/\u0000"+ - "\u0000\u036f\u0371\b\u000b\u0000\u0000\u0370\u036d\u0001\u0000\u0000\u0000"+ - "\u0370\u036e\u0001\u0000\u0000\u0000\u0371\u00c5\u0001\u0000\u0000\u0000"+ - "\u0372\u0374\u0003\u00c4\\\u0000\u0373\u0372\u0001\u0000\u0000\u0000\u0374"+ - "\u0375\u0001\u0000\u0000\u0000\u0375\u0373\u0001\u0000\u0000\u0000\u0375"+ - 
"\u0376\u0001\u0000\u0000\u0000\u0376\u00c7\u0001\u0000\u0000\u0000\u0377"+ - "\u0378\u0003\u00acP\u0000\u0378\u0379\u0001\u0000\u0000\u0000\u0379\u037a"+ - "\u0006^\u0012\u0000\u037a\u00c9\u0001\u0000\u0000\u0000\u037b\u037c\u0003"+ - "2\u0013\u0000\u037c\u037d\u0001\u0000\u0000\u0000\u037d\u037e\u0006_\t"+ - "\u0000\u037e\u00cb\u0001\u0000\u0000\u0000\u037f\u0380\u00034\u0014\u0000"+ - "\u0380\u0381\u0001\u0000\u0000\u0000\u0381\u0382\u0006`\t\u0000\u0382"+ - "\u00cd\u0001\u0000\u0000\u0000\u0383\u0384\u00036\u0015\u0000\u0384\u0385"+ - "\u0001\u0000\u0000\u0000\u0385\u0386\u0006a\t\u0000\u0386\u00cf\u0001"+ - "\u0000\u0000\u0000\u0387\u0388\u0003B\u001b\u0000\u0388\u0389\u0001\u0000"+ - "\u0000\u0000\u0389\u038a\u0006b\f\u0000\u038a\u038b\u0006b\r\u0000\u038b"+ - "\u00d1\u0001\u0000\u0000\u0000\u038c\u038d\u0003l0\u0000\u038d\u038e\u0001"+ - "\u0000\u0000\u0000\u038e\u038f\u0006c\u0013\u0000\u038f\u00d3\u0001\u0000"+ - "\u0000\u0000\u0390\u0391\u0003h.\u0000\u0391\u0392\u0001\u0000\u0000\u0000"+ - "\u0392\u0393\u0006d\u000f\u0000\u0393\u00d5\u0001\u0000\u0000\u0000\u0394"+ - "\u0399\u0003F\u001d\u0000\u0395\u0399\u0003D\u001c\u0000\u0396\u0399\u0003"+ - "T$\u0000\u0397\u0399\u0003\u009eI\u0000\u0398\u0394\u0001\u0000\u0000"+ - "\u0000\u0398\u0395\u0001\u0000\u0000\u0000\u0398\u0396\u0001\u0000\u0000"+ - "\u0000\u0398\u0397\u0001\u0000\u0000\u0000\u0399\u00d7\u0001\u0000\u0000"+ - "\u0000\u039a\u039d\u0003F\u001d\u0000\u039b\u039d\u0003\u009eI\u0000\u039c"+ - "\u039a\u0001\u0000\u0000\u0000\u039c\u039b\u0001\u0000\u0000\u0000\u039d"+ - "\u03a1\u0001\u0000\u0000\u0000\u039e\u03a0\u0003\u00d6e\u0000\u039f\u039e"+ - "\u0001\u0000\u0000\u0000\u03a0\u03a3\u0001\u0000\u0000\u0000\u03a1\u039f"+ - "\u0001\u0000\u0000\u0000\u03a1\u03a2\u0001\u0000\u0000\u0000\u03a2\u03ae"+ - "\u0001\u0000\u0000\u0000\u03a3\u03a1\u0001\u0000\u0000\u0000\u03a4\u03a7"+ - "\u0003T$\u0000\u03a5\u03a7\u0003N!\u0000\u03a6\u03a4\u0001\u0000\u0000"+ - 
"\u0000\u03a6\u03a5\u0001\u0000\u0000\u0000\u03a7\u03a9\u0001\u0000\u0000"+ - "\u0000\u03a8\u03aa\u0003\u00d6e\u0000\u03a9\u03a8\u0001\u0000\u0000\u0000"+ - "\u03aa\u03ab\u0001\u0000\u0000\u0000\u03ab\u03a9\u0001\u0000\u0000\u0000"+ - "\u03ab\u03ac\u0001\u0000\u0000\u0000\u03ac\u03ae\u0001\u0000\u0000\u0000"+ - "\u03ad\u039c\u0001\u0000\u0000\u0000\u03ad\u03a6\u0001\u0000\u0000\u0000"+ - "\u03ae\u00d9\u0001\u0000\u0000\u0000\u03af\u03b2\u0003\u00d8f\u0000\u03b0"+ - "\u03b2\u0003\u00aaO\u0000\u03b1\u03af\u0001\u0000\u0000\u0000\u03b1\u03b0"+ - "\u0001\u0000\u0000\u0000\u03b2\u03b3\u0001\u0000\u0000\u0000\u03b3\u03b1"+ - "\u0001\u0000\u0000\u0000\u03b3\u03b4\u0001\u0000\u0000\u0000\u03b4\u00db"+ - "\u0001\u0000\u0000\u0000\u03b5\u03b6\u00032\u0013\u0000\u03b6\u03b7\u0001"+ - "\u0000\u0000\u0000\u03b7\u03b8\u0006h\t\u0000\u03b8\u00dd\u0001\u0000"+ - "\u0000\u0000\u03b9\u03ba\u00034\u0014\u0000\u03ba\u03bb\u0001\u0000\u0000"+ - "\u0000\u03bb\u03bc\u0006i\t\u0000\u03bc\u00df\u0001\u0000\u0000\u0000"+ - "\u03bd\u03be\u00036\u0015\u0000\u03be\u03bf\u0001\u0000\u0000\u0000\u03bf"+ - "\u03c0\u0006j\t\u0000\u03c0\u00e1\u0001\u0000\u0000\u0000\u03c1\u03c2"+ - "\u0003B\u001b\u0000\u03c2\u03c3\u0001\u0000\u0000\u0000\u03c3\u03c4\u0006"+ - "k\f\u0000\u03c4\u03c5\u0006k\r\u0000\u03c5\u00e3\u0001\u0000\u0000\u0000"+ - "\u03c6\u03c7\u0003d,\u0000\u03c7\u03c8\u0001\u0000\u0000\u0000\u03c8\u03c9"+ - "\u0006l\u0010\u0000\u03c9\u00e5\u0001\u0000\u0000\u0000\u03ca\u03cb\u0003"+ - "h.\u0000\u03cb\u03cc\u0001\u0000\u0000\u0000\u03cc\u03cd\u0006m\u000f"+ - "\u0000\u03cd\u00e7\u0001\u0000\u0000\u0000\u03ce\u03cf\u0003l0\u0000\u03cf"+ - "\u03d0\u0001\u0000\u0000\u0000\u03d0\u03d1\u0006n\u0013\u0000\u03d1\u00e9"+ - "\u0001\u0000\u0000\u0000\u03d2\u03d3\u0005a\u0000\u0000\u03d3\u03d4\u0005"+ - "s\u0000\u0000\u03d4\u00eb\u0001\u0000\u0000\u0000\u03d5\u03d6\u0003\u00da"+ - "g\u0000\u03d6\u03d7\u0001\u0000\u0000\u0000\u03d7\u03d8\u0006p\u0014\u0000"+ - 
"\u03d8\u00ed\u0001\u0000\u0000\u0000\u03d9\u03da\u00032\u0013\u0000\u03da"+ - "\u03db\u0001\u0000\u0000\u0000\u03db\u03dc\u0006q\t\u0000\u03dc\u00ef"+ - "\u0001\u0000\u0000\u0000\u03dd\u03de\u00034\u0014\u0000\u03de\u03df\u0001"+ - "\u0000\u0000\u0000\u03df\u03e0\u0006r\t\u0000\u03e0\u00f1\u0001\u0000"+ - "\u0000\u0000\u03e1\u03e2\u00036\u0015\u0000\u03e2\u03e3\u0001\u0000\u0000"+ - "\u0000\u03e3\u03e4\u0006s\t\u0000\u03e4\u00f3\u0001\u0000\u0000\u0000"+ - "\u03e5\u03e6\u0003B\u001b\u0000\u03e6\u03e7\u0001\u0000\u0000\u0000\u03e7"+ - "\u03e8\u0006t\f\u0000\u03e8\u03e9\u0006t\r\u0000\u03e9\u00f5\u0001\u0000"+ - "\u0000\u0000\u03ea\u03eb\u0003\u00a4L\u0000\u03eb\u03ec\u0001\u0000\u0000"+ - "\u0000\u03ec\u03ed\u0006u\n\u0000\u03ed\u03ee\u0006u\u0015\u0000\u03ee"+ - "\u00f7\u0001\u0000\u0000\u0000\u03ef\u03f0\u0005o\u0000\u0000\u03f0\u03f1"+ - "\u0005n\u0000\u0000\u03f1\u03f2\u0001\u0000\u0000\u0000\u03f2\u03f3\u0006"+ - "v\u0016\u0000\u03f3\u00f9\u0001\u0000\u0000\u0000\u03f4\u03f5\u0005w\u0000"+ - "\u0000\u03f5\u03f6\u0005i\u0000\u0000\u03f6\u03f7\u0005t\u0000\u0000\u03f7"+ - "\u03f8\u0005h\u0000\u0000\u03f8\u03f9\u0001\u0000\u0000\u0000\u03f9\u03fa"+ - "\u0006w\u0016\u0000\u03fa\u00fb\u0001\u0000\u0000\u0000\u03fb\u03fc\b"+ - "\f\u0000\u0000\u03fc\u00fd\u0001\u0000\u0000\u0000\u03fd\u03ff\u0003\u00fc"+ - "x\u0000\u03fe\u03fd\u0001\u0000\u0000\u0000\u03ff\u0400\u0001\u0000\u0000"+ - "\u0000\u0400\u03fe\u0001\u0000\u0000\u0000\u0400\u0401\u0001\u0000\u0000"+ - "\u0000\u0401\u0402\u0001\u0000\u0000\u0000\u0402\u0403\u0003\u0142\u009b"+ - "\u0000\u0403\u0405\u0001\u0000\u0000\u0000\u0404\u03fe\u0001\u0000\u0000"+ - "\u0000\u0404\u0405\u0001\u0000\u0000\u0000\u0405\u0407\u0001\u0000\u0000"+ - "\u0000\u0406\u0408\u0003\u00fcx\u0000\u0407\u0406\u0001\u0000\u0000\u0000"+ - "\u0408\u0409\u0001\u0000\u0000\u0000\u0409\u0407\u0001\u0000\u0000\u0000"+ - "\u0409\u040a\u0001\u0000\u0000\u0000\u040a\u00ff\u0001\u0000\u0000\u0000"+ - 
"\u040b\u040c\u0003\u00acP\u0000\u040c\u040d\u0001\u0000\u0000\u0000\u040d"+ - "\u040e\u0006z\u0012\u0000\u040e\u0101\u0001\u0000\u0000\u0000\u040f\u0410"+ - "\u0003\u00fey\u0000\u0410\u0411\u0001\u0000\u0000\u0000\u0411\u0412\u0006"+ - "{\u0017\u0000\u0412\u0103\u0001\u0000\u0000\u0000\u0413\u0414\u00032\u0013"+ - "\u0000\u0414\u0415\u0001\u0000\u0000\u0000\u0415\u0416\u0006|\t\u0000"+ - "\u0416\u0105\u0001\u0000\u0000\u0000\u0417\u0418\u00034\u0014\u0000\u0418"+ - "\u0419\u0001\u0000\u0000\u0000\u0419\u041a\u0006}\t\u0000\u041a\u0107"+ - "\u0001\u0000\u0000\u0000\u041b\u041c\u00036\u0015\u0000\u041c\u041d\u0001"+ - "\u0000\u0000\u0000\u041d\u041e\u0006~\t\u0000\u041e\u0109\u0001\u0000"+ - "\u0000\u0000\u041f\u0420\u0003B\u001b\u0000\u0420\u0421\u0001\u0000\u0000"+ - "\u0000\u0421\u0422\u0006\u007f\f\u0000\u0422\u0423\u0006\u007f\r\u0000"+ - "\u0423\u0424\u0006\u007f\r\u0000\u0424\u010b\u0001\u0000\u0000\u0000\u0425"+ - "\u0426\u0003d,\u0000\u0426\u0427\u0001\u0000\u0000\u0000\u0427\u0428\u0006"+ - "\u0080\u0010\u0000\u0428\u010d\u0001\u0000\u0000\u0000\u0429\u042a\u0003"+ - "h.\u0000\u042a\u042b\u0001\u0000\u0000\u0000\u042b\u042c\u0006\u0081\u000f"+ - "\u0000\u042c\u010f\u0001\u0000\u0000\u0000\u042d\u042e\u0003l0\u0000\u042e"+ - "\u042f\u0001\u0000\u0000\u0000\u042f\u0430\u0006\u0082\u0013\u0000\u0430"+ - "\u0111\u0001\u0000\u0000\u0000\u0431\u0432\u0003\u00faw\u0000\u0432\u0433"+ - "\u0001\u0000\u0000\u0000\u0433\u0434\u0006\u0083\u0018\u0000\u0434\u0113"+ - "\u0001\u0000\u0000\u0000\u0435\u0436\u0003\u00dag\u0000\u0436\u0437\u0001"+ - "\u0000\u0000\u0000\u0437\u0438\u0006\u0084\u0014\u0000\u0438\u0115\u0001"+ - "\u0000\u0000\u0000\u0439\u043a\u0003\u00acP\u0000\u043a\u043b\u0001\u0000"+ - "\u0000\u0000\u043b\u043c\u0006\u0085\u0012\u0000\u043c\u0117\u0001\u0000"+ - "\u0000\u0000\u043d\u043e\u00032\u0013\u0000\u043e\u043f\u0001\u0000\u0000"+ - "\u0000\u043f\u0440\u0006\u0086\t\u0000\u0440\u0119\u0001\u0000\u0000\u0000"+ - 
"\u0441\u0442\u00034\u0014\u0000\u0442\u0443\u0001\u0000\u0000\u0000\u0443"+ - "\u0444\u0006\u0087\t\u0000\u0444\u011b\u0001\u0000\u0000\u0000\u0445\u0446"+ - "\u00036\u0015\u0000\u0446\u0447\u0001\u0000\u0000\u0000\u0447\u0448\u0006"+ - "\u0088\t\u0000\u0448\u011d\u0001\u0000\u0000\u0000\u0449\u044a\u0003B"+ - "\u001b\u0000\u044a\u044b\u0001\u0000\u0000\u0000\u044b\u044c\u0006\u0089"+ - "\f\u0000\u044c\u044d\u0006\u0089\r\u0000\u044d\u011f\u0001\u0000\u0000"+ - "\u0000\u044e\u044f\u0003l0\u0000\u044f\u0450\u0001\u0000\u0000\u0000\u0450"+ - "\u0451\u0006\u008a\u0013\u0000\u0451\u0121\u0001\u0000\u0000\u0000\u0452"+ - "\u0453\u0003\u00acP\u0000\u0453\u0454\u0001\u0000\u0000\u0000\u0454\u0455"+ - "\u0006\u008b\u0012\u0000\u0455\u0123\u0001\u0000\u0000\u0000\u0456\u0457"+ - "\u0003\u00a8N\u0000\u0457\u0458\u0001\u0000\u0000\u0000\u0458\u0459\u0006"+ - "\u008c\u0019\u0000\u0459\u0125\u0001\u0000\u0000\u0000\u045a\u045b\u0003"+ - "2\u0013\u0000\u045b\u045c\u0001\u0000\u0000\u0000\u045c\u045d\u0006\u008d"+ - "\t\u0000\u045d\u0127\u0001\u0000\u0000\u0000\u045e\u045f\u00034\u0014"+ - "\u0000\u045f\u0460\u0001\u0000\u0000\u0000\u0460\u0461\u0006\u008e\t\u0000"+ - "\u0461\u0129\u0001\u0000\u0000\u0000\u0462\u0463\u00036\u0015\u0000\u0463"+ - "\u0464\u0001\u0000\u0000\u0000\u0464\u0465\u0006\u008f\t\u0000\u0465\u012b"+ - "\u0001\u0000\u0000\u0000\u0466\u0467\u0003B\u001b\u0000\u0467\u0468\u0001"+ - "\u0000\u0000\u0000\u0468\u0469\u0006\u0090\f\u0000\u0469\u046a\u0006\u0090"+ - "\r\u0000\u046a\u012d\u0001\u0000\u0000\u0000\u046b\u046c\u0005i\u0000"+ - "\u0000\u046c\u046d\u0005n\u0000\u0000\u046d\u046e\u0005f\u0000\u0000\u046e"+ - "\u046f\u0005o\u0000\u0000\u046f\u012f\u0001\u0000\u0000\u0000\u0470\u0471"+ - "\u00032\u0013\u0000\u0471\u0472\u0001\u0000\u0000\u0000\u0472\u0473\u0006"+ - "\u0092\t\u0000\u0473\u0131\u0001\u0000\u0000\u0000\u0474\u0475\u00034"+ - "\u0014\u0000\u0475\u0476\u0001\u0000\u0000\u0000\u0476\u0477\u0006\u0093"+ - 
"\t\u0000\u0477\u0133\u0001\u0000\u0000\u0000\u0478\u0479\u00036\u0015"+ - "\u0000\u0479\u047a\u0001\u0000\u0000\u0000\u047a\u047b\u0006\u0094\t\u0000"+ - "\u047b\u0135\u0001\u0000\u0000\u0000\u047c\u047d\u0003B\u001b\u0000\u047d"+ - "\u047e\u0001\u0000\u0000\u0000\u047e\u047f\u0006\u0095\f\u0000\u047f\u0480"+ - "\u0006\u0095\r\u0000\u0480\u0137\u0001\u0000\u0000\u0000\u0481\u0482\u0005"+ - "f\u0000\u0000\u0482\u0483\u0005u\u0000\u0000\u0483\u0484\u0005n\u0000"+ - "\u0000\u0484\u0485\u0005c\u0000\u0000\u0485\u0486\u0005t\u0000\u0000\u0486"+ - "\u0487\u0005i\u0000\u0000\u0487\u0488\u0005o\u0000\u0000\u0488\u0489\u0005"+ - "n\u0000\u0000\u0489\u048a\u0005s\u0000\u0000\u048a\u0139\u0001\u0000\u0000"+ - "\u0000\u048b\u048c\u00032\u0013\u0000\u048c\u048d\u0001\u0000\u0000\u0000"+ - "\u048d\u048e\u0006\u0097\t\u0000\u048e\u013b\u0001\u0000\u0000\u0000\u048f"+ - "\u0490\u00034\u0014\u0000\u0490\u0491\u0001\u0000\u0000\u0000\u0491\u0492"+ - "\u0006\u0098\t\u0000\u0492\u013d\u0001\u0000\u0000\u0000\u0493\u0494\u0003"+ - "6\u0015\u0000\u0494\u0495\u0001\u0000\u0000\u0000\u0495\u0496\u0006\u0099"+ - "\t\u0000\u0496\u013f\u0001\u0000\u0000\u0000\u0497\u0498\u0003\u00a6M"+ - "\u0000\u0498\u0499\u0001\u0000\u0000\u0000\u0499\u049a\u0006\u009a\u000e"+ - "\u0000\u049a\u049b\u0006\u009a\r\u0000\u049b\u0141\u0001\u0000\u0000\u0000"+ - "\u049c\u049d\u0005:\u0000\u0000\u049d\u0143\u0001\u0000\u0000\u0000\u049e"+ - "\u04a4\u0003N!\u0000\u049f\u04a4\u0003D\u001c\u0000\u04a0\u04a4\u0003"+ - "l0\u0000\u04a1\u04a4\u0003F\u001d\u0000\u04a2\u04a4\u0003T$\u0000\u04a3"+ - "\u049e\u0001\u0000\u0000\u0000\u04a3\u049f\u0001\u0000\u0000\u0000\u04a3"+ - "\u04a0\u0001\u0000\u0000\u0000\u04a3\u04a1\u0001\u0000\u0000\u0000\u04a3"+ - "\u04a2\u0001\u0000\u0000\u0000\u04a4\u04a5\u0001\u0000\u0000\u0000\u04a5"+ - "\u04a3\u0001\u0000\u0000\u0000\u04a5\u04a6\u0001\u0000\u0000\u0000\u04a6"+ - "\u0145\u0001\u0000\u0000\u0000\u04a7\u04a8\u00032\u0013\u0000\u04a8\u04a9"+ - 
"\u0001\u0000\u0000\u0000\u04a9\u04aa\u0006\u009d\t\u0000\u04aa\u0147\u0001"+ - "\u0000\u0000\u0000\u04ab\u04ac\u00034\u0014\u0000\u04ac\u04ad\u0001\u0000"+ - "\u0000\u0000\u04ad\u04ae\u0006\u009e\t\u0000\u04ae\u0149\u0001\u0000\u0000"+ - "\u0000\u04af\u04b0\u00036\u0015\u0000\u04b0\u04b1\u0001\u0000\u0000\u0000"+ - "\u04b1\u04b2\u0006\u009f\t\u0000\u04b2\u014b\u0001\u0000\u0000\u0000:"+ - "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\u01e5\u01ef"+ - "\u01f3\u01f6\u01ff\u0201\u020c\u0235\u023a\u0243\u024a\u024f\u0251\u025c"+ - "\u0264\u0267\u0269\u026e\u0273\u0279\u0280\u0285\u028b\u028e\u0296\u029a"+ - "\u031f\u0324\u0329\u032b\u0331\u0370\u0375\u0398\u039c\u03a1\u03a6\u03ab"+ - "\u03ad\u03b1\u03b3\u0400\u0404\u0409\u04a3\u04a5\u001a\u0005\u0002\u0000"+ - "\u0005\u0004\u0000\u0005\u0006\u0000\u0005\u0001\u0000\u0005\u0003\u0000"+ - "\u0005\n\u0000\u0005\b\u0000\u0005\u0005\u0000\u0005\t\u0000\u0000\u0001"+ - "\u0000\u0007A\u0000\u0005\u0000\u0000\u0007\u001a\u0000\u0004\u0000\u0000"+ - "\u0007B\u0000\u0007#\u0000\u0007!\u0000\u0007\u001b\u0000\u0007D\u0000"+ - "\u0007%\u0000\u0007N\u0000\u0005\u000b\u0000\u0005\u0007\u0000\u0007X"+ - "\u0000\u0007W\u0000\u0007C\u0000"; + "\u0000\n\u0140\u0001\u0000\u0000\u0000\n\u0142\u0001\u0000\u0000\u0000"+ + "\u000b\u0144\u0001\u0000\u0000\u0000\u000b\u0146\u0001\u0000\u0000\u0000"+ + "\u000b\u0148\u0001\u0000\u0000\u0000\u000b\u014a\u0001\u0000\u0000\u0000"+ + "\u000b\u014c\u0001\u0000\u0000\u0000\u000b\u014e\u0001\u0000\u0000\u0000"+ + "\f\u0150\u0001\u0000\u0000\u0000\f\u0152\u0001\u0000\u0000\u0000\f\u0154"+ + "\u0001\u0000\u0000\u0000\f\u0156\u0001\u0000\u0000\u0000\f\u0158\u0001"+ + "\u0000\u0000\u0000\r\u015a\u0001\u0000\u0000\u0000\r\u015c\u0001\u0000"+ + "\u0000\u0000\r\u015e\u0001\u0000\u0000\u0000\r\u0160\u0001\u0000\u0000"+ + "\u0000\r\u0162\u0001\u0000\u0000\u0000\r\u0164\u0001\u0000\u0000\u0000"+ + "\r\u0166\u0001\u0000\u0000\u0000\r\u0168\u0001\u0000\u0000\u0000\u000e"+ + 
"\u016a\u0001\u0000\u0000\u0000\u0010\u0174\u0001\u0000\u0000\u0000\u0012"+ + "\u017b\u0001\u0000\u0000\u0000\u0014\u0184\u0001\u0000\u0000\u0000\u0016"+ + "\u018b\u0001\u0000\u0000\u0000\u0018\u0195\u0001\u0000\u0000\u0000\u001a"+ + "\u019c\u0001\u0000\u0000\u0000\u001c\u01a3\u0001\u0000\u0000\u0000\u001e"+ + "\u01b1\u0001\u0000\u0000\u0000 \u01b8\u0001\u0000\u0000\u0000\"\u01c0"+ + "\u0001\u0000\u0000\u0000$\u01c7\u0001\u0000\u0000\u0000&\u01d1\u0001\u0000"+ + "\u0000\u0000(\u01dd\u0001\u0000\u0000\u0000*\u01e6\u0001\u0000\u0000\u0000"+ + ",\u01ec\u0001\u0000\u0000\u0000.\u01f3\u0001\u0000\u0000\u00000\u01fa"+ + "\u0001\u0000\u0000\u00002\u0202\u0001\u0000\u0000\u00004\u020b\u0001\u0000"+ + "\u0000\u00006\u0211\u0001\u0000\u0000\u00008\u0222\u0001\u0000\u0000\u0000"+ + ":\u0232\u0001\u0000\u0000\u0000<\u023b\u0001\u0000\u0000\u0000>\u023e"+ + "\u0001\u0000\u0000\u0000@\u0242\u0001\u0000\u0000\u0000B\u0247\u0001\u0000"+ + "\u0000\u0000D\u024c\u0001\u0000\u0000\u0000F\u0250\u0001\u0000\u0000\u0000"+ + "H\u0254\u0001\u0000\u0000\u0000J\u0258\u0001\u0000\u0000\u0000L\u025c"+ + "\u0001\u0000\u0000\u0000N\u025e\u0001\u0000\u0000\u0000P\u0260\u0001\u0000"+ + "\u0000\u0000R\u0263\u0001\u0000\u0000\u0000T\u0265\u0001\u0000\u0000\u0000"+ + "V\u026e\u0001\u0000\u0000\u0000X\u0270\u0001\u0000\u0000\u0000Z\u0275"+ + "\u0001\u0000\u0000\u0000\\\u0277\u0001\u0000\u0000\u0000^\u027c\u0001"+ + "\u0000\u0000\u0000`\u029b\u0001\u0000\u0000\u0000b\u029e\u0001\u0000\u0000"+ + "\u0000d\u02cc\u0001\u0000\u0000\u0000f\u02ce\u0001\u0000\u0000\u0000h"+ + "\u02d1\u0001\u0000\u0000\u0000j\u02d5\u0001\u0000\u0000\u0000l\u02d9\u0001"+ + "\u0000\u0000\u0000n\u02db\u0001\u0000\u0000\u0000p\u02de\u0001\u0000\u0000"+ + "\u0000r\u02e0\u0001\u0000\u0000\u0000t\u02e5\u0001\u0000\u0000\u0000v"+ + "\u02e7\u0001\u0000\u0000\u0000x\u02ed\u0001\u0000\u0000\u0000z\u02f3\u0001"+ + "\u0000\u0000\u0000|\u02f8\u0001\u0000\u0000\u0000~\u02fa\u0001\u0000\u0000"+ + 
"\u0000\u0080\u02fd\u0001\u0000\u0000\u0000\u0082\u0300\u0001\u0000\u0000"+ + "\u0000\u0084\u0305\u0001\u0000\u0000\u0000\u0086\u0309\u0001\u0000\u0000"+ + "\u0000\u0088\u030e\u0001\u0000\u0000\u0000\u008a\u0314\u0001\u0000\u0000"+ + "\u0000\u008c\u0317\u0001\u0000\u0000\u0000\u008e\u0319\u0001\u0000\u0000"+ + "\u0000\u0090\u031f\u0001\u0000\u0000\u0000\u0092\u0321\u0001\u0000\u0000"+ + "\u0000\u0094\u0326\u0001\u0000\u0000\u0000\u0096\u0329\u0001\u0000\u0000"+ + "\u0000\u0098\u032c\u0001\u0000\u0000\u0000\u009a\u032f\u0001\u0000\u0000"+ + "\u0000\u009c\u0331\u0001\u0000\u0000\u0000\u009e\u0334\u0001\u0000\u0000"+ + "\u0000\u00a0\u0336\u0001\u0000\u0000\u0000\u00a2\u0339\u0001\u0000\u0000"+ + "\u0000\u00a4\u033b\u0001\u0000\u0000\u0000\u00a6\u033d\u0001\u0000\u0000"+ + "\u0000\u00a8\u033f\u0001\u0000\u0000\u0000\u00aa\u0341\u0001\u0000\u0000"+ + "\u0000\u00ac\u0343\u0001\u0000\u0000\u0000\u00ae\u0348\u0001\u0000\u0000"+ + "\u0000\u00b0\u035d\u0001\u0000\u0000\u0000\u00b2\u035f\u0001\u0000\u0000"+ + "\u0000\u00b4\u0367\u0001\u0000\u0000\u0000\u00b6\u0369\u0001\u0000\u0000"+ + "\u0000\u00b8\u036d\u0001\u0000\u0000\u0000\u00ba\u0371\u0001\u0000\u0000"+ + "\u0000\u00bc\u0375\u0001\u0000\u0000\u0000\u00be\u037a\u0001\u0000\u0000"+ + "\u0000\u00c0\u037e\u0001\u0000\u0000\u0000\u00c2\u0382\u0001\u0000\u0000"+ + "\u0000\u00c4\u0386\u0001\u0000\u0000\u0000\u00c6\u038a\u0001\u0000\u0000"+ + "\u0000\u00c8\u038e\u0001\u0000\u0000\u0000\u00ca\u0396\u0001\u0000\u0000"+ + "\u0000\u00cc\u039f\u0001\u0000\u0000\u0000\u00ce\u03a3\u0001\u0000\u0000"+ + "\u0000\u00d0\u03a7\u0001\u0000\u0000\u0000\u00d2\u03ab\u0001\u0000\u0000"+ + "\u0000\u00d4\u03af\u0001\u0000\u0000\u0000\u00d6\u03b4\u0001\u0000\u0000"+ + "\u0000\u00d8\u03b8\u0001\u0000\u0000\u0000\u00da\u03c0\u0001\u0000\u0000"+ + "\u0000\u00dc\u03d5\u0001\u0000\u0000\u0000\u00de\u03d9\u0001\u0000\u0000"+ + "\u0000\u00e0\u03dd\u0001\u0000\u0000\u0000\u00e2\u03e1\u0001\u0000\u0000"+ + 
"\u0000\u00e4\u03e5\u0001\u0000\u0000\u0000\u00e6\u03e9\u0001\u0000\u0000"+ + "\u0000\u00e8\u03ee\u0001\u0000\u0000\u0000\u00ea\u03f2\u0001\u0000\u0000"+ + "\u0000\u00ec\u03f6\u0001\u0000\u0000\u0000\u00ee\u03fa\u0001\u0000\u0000"+ + "\u0000\u00f0\u03fd\u0001\u0000\u0000\u0000\u00f2\u0401\u0001\u0000\u0000"+ + "\u0000\u00f4\u0405\u0001\u0000\u0000\u0000\u00f6\u0409\u0001\u0000\u0000"+ + "\u0000\u00f8\u040d\u0001\u0000\u0000\u0000\u00fa\u0412\u0001\u0000\u0000"+ + "\u0000\u00fc\u0417\u0001\u0000\u0000\u0000\u00fe\u041c\u0001\u0000\u0000"+ + "\u0000\u0100\u0423\u0001\u0000\u0000\u0000\u0102\u042c\u0001\u0000\u0000"+ + "\u0000\u0104\u0433\u0001\u0000\u0000\u0000\u0106\u0437\u0001\u0000\u0000"+ + "\u0000\u0108\u043b\u0001\u0000\u0000\u0000\u010a\u043f\u0001\u0000\u0000"+ + "\u0000\u010c\u0443\u0001\u0000\u0000\u0000\u010e\u0447\u0001\u0000\u0000"+ + "\u0000\u0110\u044d\u0001\u0000\u0000\u0000\u0112\u0451\u0001\u0000\u0000"+ + "\u0000\u0114\u0455\u0001\u0000\u0000\u0000\u0116\u0459\u0001\u0000\u0000"+ + "\u0000\u0118\u045d\u0001\u0000\u0000\u0000\u011a\u0461\u0001\u0000\u0000"+ + "\u0000\u011c\u0465\u0001\u0000\u0000\u0000\u011e\u0469\u0001\u0000\u0000"+ + "\u0000\u0120\u046d\u0001\u0000\u0000\u0000\u0122\u0471\u0001\u0000\u0000"+ + "\u0000\u0124\u0476\u0001\u0000\u0000\u0000\u0126\u047a\u0001\u0000\u0000"+ + "\u0000\u0128\u047e\u0001\u0000\u0000\u0000\u012a\u0482\u0001\u0000\u0000"+ + "\u0000\u012c\u0486\u0001\u0000\u0000\u0000\u012e\u048a\u0001\u0000\u0000"+ + "\u0000\u0130\u048e\u0001\u0000\u0000\u0000\u0132\u0493\u0001\u0000\u0000"+ + "\u0000\u0134\u0498\u0001\u0000\u0000\u0000\u0136\u049c\u0001\u0000\u0000"+ + "\u0000\u0138\u04a0\u0001\u0000\u0000\u0000\u013a\u04a4\u0001\u0000\u0000"+ + "\u0000\u013c\u04a9\u0001\u0000\u0000\u0000\u013e\u04b3\u0001\u0000\u0000"+ + "\u0000\u0140\u04b7\u0001\u0000\u0000\u0000\u0142\u04bb\u0001\u0000\u0000"+ + "\u0000\u0144\u04bf\u0001\u0000\u0000\u0000\u0146\u04c4\u0001\u0000\u0000"+ + 
"\u0000\u0148\u04cb\u0001\u0000\u0000\u0000\u014a\u04cf\u0001\u0000\u0000"+ + "\u0000\u014c\u04d3\u0001\u0000\u0000\u0000\u014e\u04d7\u0001\u0000\u0000"+ + "\u0000\u0150\u04db\u0001\u0000\u0000\u0000\u0152\u04e0\u0001\u0000\u0000"+ + "\u0000\u0154\u04e6\u0001\u0000\u0000\u0000\u0156\u04ea\u0001\u0000\u0000"+ + "\u0000\u0158\u04ee\u0001\u0000\u0000\u0000\u015a\u04f2\u0001\u0000\u0000"+ + "\u0000\u015c\u04f8\u0001\u0000\u0000\u0000\u015e\u04fc\u0001\u0000\u0000"+ + "\u0000\u0160\u0500\u0001\u0000\u0000\u0000\u0162\u0504\u0001\u0000\u0000"+ + "\u0000\u0164\u050a\u0001\u0000\u0000\u0000\u0166\u0510\u0001\u0000\u0000"+ + "\u0000\u0168\u0516\u0001\u0000\u0000\u0000\u016a\u016b\u0005d\u0000\u0000"+ + "\u016b\u016c\u0005i\u0000\u0000\u016c\u016d\u0005s\u0000\u0000\u016d\u016e"+ + "\u0005s\u0000\u0000\u016e\u016f\u0005e\u0000\u0000\u016f\u0170\u0005c"+ + "\u0000\u0000\u0170\u0171\u0005t\u0000\u0000\u0171\u0172\u0001\u0000\u0000"+ + "\u0000\u0172\u0173\u0006\u0000\u0000\u0000\u0173\u000f\u0001\u0000\u0000"+ + "\u0000\u0174\u0175\u0005d\u0000\u0000\u0175\u0176\u0005r\u0000\u0000\u0176"+ + "\u0177\u0005o\u0000\u0000\u0177\u0178\u0005p\u0000\u0000\u0178\u0179\u0001"+ + "\u0000\u0000\u0000\u0179\u017a\u0006\u0001\u0001\u0000\u017a\u0011\u0001"+ + "\u0000\u0000\u0000\u017b\u017c\u0005e\u0000\u0000\u017c\u017d\u0005n\u0000"+ + "\u0000\u017d\u017e\u0005r\u0000\u0000\u017e\u017f\u0005i\u0000\u0000\u017f"+ + "\u0180\u0005c\u0000\u0000\u0180\u0181\u0005h\u0000\u0000\u0181\u0182\u0001"+ + "\u0000\u0000\u0000\u0182\u0183\u0006\u0002\u0002\u0000\u0183\u0013\u0001"+ + "\u0000\u0000\u0000\u0184\u0185\u0005e\u0000\u0000\u0185\u0186\u0005v\u0000"+ + "\u0000\u0186\u0187\u0005a\u0000\u0000\u0187\u0188\u0005l\u0000\u0000\u0188"+ + "\u0189\u0001\u0000\u0000\u0000\u0189\u018a\u0006\u0003\u0000\u0000\u018a"+ + "\u0015\u0001\u0000\u0000\u0000\u018b\u018c\u0005e\u0000\u0000\u018c\u018d"+ + "\u0005x\u0000\u0000\u018d\u018e\u0005p\u0000\u0000\u018e\u018f\u0005l"+ + 
"\u0000\u0000\u018f\u0190\u0005a\u0000\u0000\u0190\u0191\u0005i\u0000\u0000"+ + "\u0191\u0192\u0005n\u0000\u0000\u0192\u0193\u0001\u0000\u0000\u0000\u0193"+ + "\u0194\u0006\u0004\u0003\u0000\u0194\u0017\u0001\u0000\u0000\u0000\u0195"+ + "\u0196\u0005f\u0000\u0000\u0196\u0197\u0005r\u0000\u0000\u0197\u0198\u0005"+ + "o\u0000\u0000\u0198\u0199\u0005m\u0000\u0000\u0199\u019a\u0001\u0000\u0000"+ + "\u0000\u019a\u019b\u0006\u0005\u0004\u0000\u019b\u0019\u0001\u0000\u0000"+ + "\u0000\u019c\u019d\u0005g\u0000\u0000\u019d\u019e\u0005r\u0000\u0000\u019e"+ + "\u019f\u0005o\u0000\u0000\u019f\u01a0\u0005k\u0000\u0000\u01a0\u01a1\u0001"+ + "\u0000\u0000\u0000\u01a1\u01a2\u0006\u0006\u0000\u0000\u01a2\u001b\u0001"+ + "\u0000\u0000\u0000\u01a3\u01a4\u0005i\u0000\u0000\u01a4\u01a5\u0005n\u0000"+ + "\u0000\u01a5\u01a6\u0005l\u0000\u0000\u01a6\u01a7\u0005i\u0000\u0000\u01a7"+ + "\u01a8\u0005n\u0000\u0000\u01a8\u01a9\u0005e\u0000\u0000\u01a9\u01aa\u0005"+ + "s\u0000\u0000\u01aa\u01ab\u0005t\u0000\u0000\u01ab\u01ac\u0005a\u0000"+ + "\u0000\u01ac\u01ad\u0005t\u0000\u0000\u01ad\u01ae\u0005s\u0000\u0000\u01ae"+ + "\u01af\u0001\u0000\u0000\u0000\u01af\u01b0\u0006\u0007\u0000\u0000\u01b0"+ + "\u001d\u0001\u0000\u0000\u0000\u01b1\u01b2\u0005k\u0000\u0000\u01b2\u01b3"+ + "\u0005e\u0000\u0000\u01b3\u01b4\u0005e\u0000\u0000\u01b4\u01b5\u0005p"+ + "\u0000\u0000\u01b5\u01b6\u0001\u0000\u0000\u0000\u01b6\u01b7\u0006\b\u0001"+ + "\u0000\u01b7\u001f\u0001\u0000\u0000\u0000\u01b8\u01b9\u0005l\u0000\u0000"+ + "\u01b9\u01ba\u0005i\u0000\u0000\u01ba\u01bb\u0005m\u0000\u0000\u01bb\u01bc"+ + "\u0005i\u0000\u0000\u01bc\u01bd\u0005t\u0000\u0000\u01bd\u01be\u0001\u0000"+ + "\u0000\u0000\u01be\u01bf\u0006\t\u0000\u0000\u01bf!\u0001\u0000\u0000"+ + "\u0000\u01c0\u01c1\u0005m\u0000\u0000\u01c1\u01c2\u0005e\u0000\u0000\u01c2"+ + "\u01c3\u0005t\u0000\u0000\u01c3\u01c4\u0005a\u0000\u0000\u01c4\u01c5\u0001"+ + "\u0000\u0000\u0000\u01c5\u01c6\u0006\n\u0005\u0000\u01c6#\u0001\u0000"+ + 
"\u0000\u0000\u01c7\u01c8\u0005m\u0000\u0000\u01c8\u01c9\u0005e\u0000\u0000"+ + "\u01c9\u01ca\u0005t\u0000\u0000\u01ca\u01cb\u0005r\u0000\u0000\u01cb\u01cc"+ + "\u0005i\u0000\u0000\u01cc\u01cd\u0005c\u0000\u0000\u01cd\u01ce\u0005s"+ + "\u0000\u0000\u01ce\u01cf\u0001\u0000\u0000\u0000\u01cf\u01d0\u0006\u000b"+ + "\u0006\u0000\u01d0%\u0001\u0000\u0000\u0000\u01d1\u01d2\u0005m\u0000\u0000"+ + "\u01d2\u01d3\u0005v\u0000\u0000\u01d3\u01d4\u0005_\u0000\u0000\u01d4\u01d5"+ + "\u0005e\u0000\u0000\u01d5\u01d6\u0005x\u0000\u0000\u01d6\u01d7\u0005p"+ + "\u0000\u0000\u01d7\u01d8\u0005a\u0000\u0000\u01d8\u01d9\u0005n\u0000\u0000"+ + "\u01d9\u01da\u0005d\u0000\u0000\u01da\u01db\u0001\u0000\u0000\u0000\u01db"+ + "\u01dc\u0006\f\u0007\u0000\u01dc\'\u0001\u0000\u0000\u0000\u01dd\u01de"+ + "\u0005r\u0000\u0000\u01de\u01df\u0005e\u0000\u0000\u01df\u01e0\u0005n"+ + "\u0000\u0000\u01e0\u01e1\u0005a\u0000\u0000\u01e1\u01e2\u0005m\u0000\u0000"+ + "\u01e2\u01e3\u0005e\u0000\u0000\u01e3\u01e4\u0001\u0000\u0000\u0000\u01e4"+ + "\u01e5\u0006\r\b\u0000\u01e5)\u0001\u0000\u0000\u0000\u01e6\u01e7\u0005"+ + "r\u0000\u0000\u01e7\u01e8\u0005o\u0000\u0000\u01e8\u01e9\u0005w\u0000"+ + "\u0000\u01e9\u01ea\u0001\u0000\u0000\u0000\u01ea\u01eb\u0006\u000e\u0000"+ + "\u0000\u01eb+\u0001\u0000\u0000\u0000\u01ec\u01ed\u0005s\u0000\u0000\u01ed"+ + "\u01ee\u0005h\u0000\u0000\u01ee\u01ef\u0005o\u0000\u0000\u01ef\u01f0\u0005"+ + "w\u0000\u0000\u01f0\u01f1\u0001\u0000\u0000\u0000\u01f1\u01f2\u0006\u000f"+ + "\t\u0000\u01f2-\u0001\u0000\u0000\u0000\u01f3\u01f4\u0005s\u0000\u0000"+ + "\u01f4\u01f5\u0005o\u0000\u0000\u01f5\u01f6\u0005r\u0000\u0000\u01f6\u01f7"+ + "\u0005t\u0000\u0000\u01f7\u01f8\u0001\u0000\u0000\u0000\u01f8\u01f9\u0006"+ + "\u0010\u0000\u0000\u01f9/\u0001\u0000\u0000\u0000\u01fa\u01fb\u0005s\u0000"+ + "\u0000\u01fb\u01fc\u0005t\u0000\u0000\u01fc\u01fd\u0005a\u0000\u0000\u01fd"+ + "\u01fe\u0005t\u0000\u0000\u01fe\u01ff\u0005s\u0000\u0000\u01ff\u0200\u0001"+ + 
"\u0000\u0000\u0000\u0200\u0201\u0006\u0011\u0000\u0000\u02011\u0001\u0000"+ + "\u0000\u0000\u0202\u0203\u0005w\u0000\u0000\u0203\u0204\u0005h\u0000\u0000"+ + "\u0204\u0205\u0005e\u0000\u0000\u0205\u0206\u0005r\u0000\u0000\u0206\u0207"+ + "\u0005e\u0000\u0000\u0207\u0208\u0001\u0000\u0000\u0000\u0208\u0209\u0006"+ + "\u0012\u0000\u0000\u02093\u0001\u0000\u0000\u0000\u020a\u020c\b\u0000"+ + "\u0000\u0000\u020b\u020a\u0001\u0000\u0000\u0000\u020c\u020d\u0001\u0000"+ + "\u0000\u0000\u020d\u020b\u0001\u0000\u0000\u0000\u020d\u020e\u0001\u0000"+ + "\u0000\u0000\u020e\u020f\u0001\u0000\u0000\u0000\u020f\u0210\u0006\u0013"+ + "\u0000\u0000\u02105\u0001\u0000\u0000\u0000\u0211\u0212\u0005/\u0000\u0000"+ + "\u0212\u0213\u0005/\u0000\u0000\u0213\u0217\u0001\u0000\u0000\u0000\u0214"+ + "\u0216\b\u0001\u0000\u0000\u0215\u0214\u0001\u0000\u0000\u0000\u0216\u0219"+ + "\u0001\u0000\u0000\u0000\u0217\u0215\u0001\u0000\u0000\u0000\u0217\u0218"+ + "\u0001\u0000\u0000\u0000\u0218\u021b\u0001\u0000\u0000\u0000\u0219\u0217"+ + "\u0001\u0000\u0000\u0000\u021a\u021c\u0005\r\u0000\u0000\u021b\u021a\u0001"+ + "\u0000\u0000\u0000\u021b\u021c\u0001\u0000\u0000\u0000\u021c\u021e\u0001"+ + "\u0000\u0000\u0000\u021d\u021f\u0005\n\u0000\u0000\u021e\u021d\u0001\u0000"+ + "\u0000\u0000\u021e\u021f\u0001\u0000\u0000\u0000\u021f\u0220\u0001\u0000"+ + "\u0000\u0000\u0220\u0221\u0006\u0014\n\u0000\u02217\u0001\u0000\u0000"+ + "\u0000\u0222\u0223\u0005/\u0000\u0000\u0223\u0224\u0005*\u0000\u0000\u0224"+ + "\u0229\u0001\u0000\u0000\u0000\u0225\u0228\u00038\u0015\u0000\u0226\u0228"+ + "\t\u0000\u0000\u0000\u0227\u0225\u0001\u0000\u0000\u0000\u0227\u0226\u0001"+ + "\u0000\u0000\u0000\u0228\u022b\u0001\u0000\u0000\u0000\u0229\u022a\u0001"+ + "\u0000\u0000\u0000\u0229\u0227\u0001\u0000\u0000\u0000\u022a\u022c\u0001"+ + "\u0000\u0000\u0000\u022b\u0229\u0001\u0000\u0000\u0000\u022c\u022d\u0005"+ + "*\u0000\u0000\u022d\u022e\u0005/\u0000\u0000\u022e\u022f\u0001\u0000\u0000"+ + 
"\u0000\u022f\u0230\u0006\u0015\n\u0000\u02309\u0001\u0000\u0000\u0000"+ + "\u0231\u0233\u0007\u0002\u0000\u0000\u0232\u0231\u0001\u0000\u0000\u0000"+ + "\u0233\u0234\u0001\u0000\u0000\u0000\u0234\u0232\u0001\u0000\u0000\u0000"+ + "\u0234\u0235\u0001\u0000\u0000\u0000\u0235\u0236\u0001\u0000\u0000\u0000"+ + "\u0236\u0237\u0006\u0016\n\u0000\u0237;\u0001\u0000\u0000\u0000\u0238"+ + "\u023c\b\u0003\u0000\u0000\u0239\u023a\u0005/\u0000\u0000\u023a\u023c"+ + "\b\u0004\u0000\u0000\u023b\u0238\u0001\u0000\u0000\u0000\u023b\u0239\u0001"+ + "\u0000\u0000\u0000\u023c=\u0001\u0000\u0000\u0000\u023d\u023f\u0003<\u0017"+ + "\u0000\u023e\u023d\u0001\u0000\u0000\u0000\u023f\u0240\u0001\u0000\u0000"+ + "\u0000\u0240\u023e\u0001\u0000\u0000\u0000\u0240\u0241\u0001\u0000\u0000"+ + "\u0000\u0241?\u0001\u0000\u0000\u0000\u0242\u0243\u0003\u00acO\u0000\u0243"+ + "\u0244\u0001\u0000\u0000\u0000\u0244\u0245\u0006\u0019\u000b\u0000\u0245"+ + "\u0246\u0006\u0019\f\u0000\u0246A\u0001\u0000\u0000\u0000\u0247\u0248"+ + "\u0003J\u001e\u0000\u0248\u0249\u0001\u0000\u0000\u0000\u0249\u024a\u0006"+ + "\u001a\r\u0000\u024a\u024b\u0006\u001a\u000e\u0000\u024bC\u0001\u0000"+ + "\u0000\u0000\u024c\u024d\u0003:\u0016\u0000\u024d\u024e\u0001\u0000\u0000"+ + "\u0000\u024e\u024f\u0006\u001b\n\u0000\u024fE\u0001\u0000\u0000\u0000"+ + "\u0250\u0251\u00036\u0014\u0000\u0251\u0252\u0001\u0000\u0000\u0000\u0252"+ + "\u0253\u0006\u001c\n\u0000\u0253G\u0001\u0000\u0000\u0000\u0254\u0255"+ + "\u00038\u0015\u0000\u0255\u0256\u0001\u0000\u0000\u0000\u0256\u0257\u0006"+ + "\u001d\n\u0000\u0257I\u0001\u0000\u0000\u0000\u0258\u0259\u0005|\u0000"+ + "\u0000\u0259\u025a\u0001\u0000\u0000\u0000\u025a\u025b\u0006\u001e\u000e"+ + "\u0000\u025bK\u0001\u0000\u0000\u0000\u025c\u025d\u0007\u0005\u0000\u0000"+ + "\u025dM\u0001\u0000\u0000\u0000\u025e\u025f\u0007\u0006\u0000\u0000\u025f"+ + "O\u0001\u0000\u0000\u0000\u0260\u0261\u0005\\\u0000\u0000\u0261\u0262"+ + 
"\u0007\u0007\u0000\u0000\u0262Q\u0001\u0000\u0000\u0000\u0263\u0264\b"+ + "\b\u0000\u0000\u0264S\u0001\u0000\u0000\u0000\u0265\u0267\u0007\t\u0000"+ + "\u0000\u0266\u0268\u0007\n\u0000\u0000\u0267\u0266\u0001\u0000\u0000\u0000"+ + "\u0267\u0268\u0001\u0000\u0000\u0000\u0268\u026a\u0001\u0000\u0000\u0000"+ + "\u0269\u026b\u0003L\u001f\u0000\u026a\u0269\u0001\u0000\u0000\u0000\u026b"+ + "\u026c\u0001\u0000\u0000\u0000\u026c\u026a\u0001\u0000\u0000\u0000\u026c"+ + "\u026d\u0001\u0000\u0000\u0000\u026dU\u0001\u0000\u0000\u0000\u026e\u026f"+ + "\u0005@\u0000\u0000\u026fW\u0001\u0000\u0000\u0000\u0270\u0271\u0005`"+ + "\u0000\u0000\u0271Y\u0001\u0000\u0000\u0000\u0272\u0276\b\u000b\u0000"+ + "\u0000\u0273\u0274\u0005`\u0000\u0000\u0274\u0276\u0005`\u0000\u0000\u0275"+ + "\u0272\u0001\u0000\u0000\u0000\u0275\u0273\u0001\u0000\u0000\u0000\u0276"+ + "[\u0001\u0000\u0000\u0000\u0277\u0278\u0005_\u0000\u0000\u0278]\u0001"+ + "\u0000\u0000\u0000\u0279\u027d\u0003N \u0000\u027a\u027d\u0003L\u001f"+ + "\u0000\u027b\u027d\u0003\\\'\u0000\u027c\u0279\u0001\u0000\u0000\u0000"+ + "\u027c\u027a\u0001\u0000\u0000\u0000\u027c\u027b\u0001\u0000\u0000\u0000"+ + "\u027d_\u0001\u0000\u0000\u0000\u027e\u0283\u0005\"\u0000\u0000\u027f"+ + "\u0282\u0003P!\u0000\u0280\u0282\u0003R\"\u0000\u0281\u027f\u0001\u0000"+ + "\u0000\u0000\u0281\u0280\u0001\u0000\u0000\u0000\u0282\u0285\u0001\u0000"+ + "\u0000\u0000\u0283\u0281\u0001\u0000\u0000\u0000\u0283\u0284\u0001\u0000"+ + "\u0000\u0000\u0284\u0286\u0001\u0000\u0000\u0000\u0285\u0283\u0001\u0000"+ + "\u0000\u0000\u0286\u029c\u0005\"\u0000\u0000\u0287\u0288\u0005\"\u0000"+ + "\u0000\u0288\u0289\u0005\"\u0000\u0000\u0289\u028a\u0005\"\u0000\u0000"+ + "\u028a\u028e\u0001\u0000\u0000\u0000\u028b\u028d\b\u0001\u0000\u0000\u028c"+ + "\u028b\u0001\u0000\u0000\u0000\u028d\u0290\u0001\u0000\u0000\u0000\u028e"+ + "\u028f\u0001\u0000\u0000\u0000\u028e\u028c\u0001\u0000\u0000\u0000\u028f"+ + 
"\u0291\u0001\u0000\u0000\u0000\u0290\u028e\u0001\u0000\u0000\u0000\u0291"+ + "\u0292\u0005\"\u0000\u0000\u0292\u0293\u0005\"\u0000\u0000\u0293\u0294"+ + "\u0005\"\u0000\u0000\u0294\u0296\u0001\u0000\u0000\u0000\u0295\u0297\u0005"+ + "\"\u0000\u0000\u0296\u0295\u0001\u0000\u0000\u0000\u0296\u0297\u0001\u0000"+ + "\u0000\u0000\u0297\u0299\u0001\u0000\u0000\u0000\u0298\u029a\u0005\"\u0000"+ + "\u0000\u0299\u0298\u0001\u0000\u0000\u0000\u0299\u029a\u0001\u0000\u0000"+ + "\u0000\u029a\u029c\u0001\u0000\u0000\u0000\u029b\u027e\u0001\u0000\u0000"+ + "\u0000\u029b\u0287\u0001\u0000\u0000\u0000\u029ca\u0001\u0000\u0000\u0000"+ + "\u029d\u029f\u0003L\u001f\u0000\u029e\u029d\u0001\u0000\u0000\u0000\u029f"+ + "\u02a0\u0001\u0000\u0000\u0000\u02a0\u029e\u0001\u0000\u0000\u0000\u02a0"+ + "\u02a1\u0001\u0000\u0000\u0000\u02a1c\u0001\u0000\u0000\u0000\u02a2\u02a4"+ + "\u0003L\u001f\u0000\u02a3\u02a2\u0001\u0000\u0000\u0000\u02a4\u02a5\u0001"+ + "\u0000\u0000\u0000\u02a5\u02a3\u0001\u0000\u0000\u0000\u02a5\u02a6\u0001"+ + "\u0000\u0000\u0000\u02a6\u02a7\u0001\u0000\u0000\u0000\u02a7\u02ab\u0003"+ + "t3\u0000\u02a8\u02aa\u0003L\u001f\u0000\u02a9\u02a8\u0001\u0000\u0000"+ + "\u0000\u02aa\u02ad\u0001\u0000\u0000\u0000\u02ab\u02a9\u0001\u0000\u0000"+ + "\u0000\u02ab\u02ac\u0001\u0000\u0000\u0000\u02ac\u02cd\u0001\u0000\u0000"+ + "\u0000\u02ad\u02ab\u0001\u0000\u0000\u0000\u02ae\u02b0\u0003t3\u0000\u02af"+ + "\u02b1\u0003L\u001f\u0000\u02b0\u02af\u0001\u0000\u0000\u0000\u02b1\u02b2"+ + "\u0001\u0000\u0000\u0000\u02b2\u02b0\u0001\u0000\u0000\u0000\u02b2\u02b3"+ + "\u0001\u0000\u0000\u0000\u02b3\u02cd\u0001\u0000\u0000\u0000\u02b4\u02b6"+ + "\u0003L\u001f\u0000\u02b5\u02b4\u0001\u0000\u0000\u0000\u02b6\u02b7\u0001"+ + "\u0000\u0000\u0000\u02b7\u02b5\u0001\u0000\u0000\u0000\u02b7\u02b8\u0001"+ + "\u0000\u0000\u0000\u02b8\u02c0\u0001\u0000\u0000\u0000\u02b9\u02bd\u0003"+ + "t3\u0000\u02ba\u02bc\u0003L\u001f\u0000\u02bb\u02ba\u0001\u0000\u0000"+ + 
"\u0000\u02bc\u02bf\u0001\u0000\u0000\u0000\u02bd\u02bb\u0001\u0000\u0000"+ + "\u0000\u02bd\u02be\u0001\u0000\u0000\u0000\u02be\u02c1\u0001\u0000\u0000"+ + "\u0000\u02bf\u02bd\u0001\u0000\u0000\u0000\u02c0\u02b9\u0001\u0000\u0000"+ + "\u0000\u02c0\u02c1\u0001\u0000\u0000\u0000\u02c1\u02c2\u0001\u0000\u0000"+ + "\u0000\u02c2\u02c3\u0003T#\u0000\u02c3\u02cd\u0001\u0000\u0000\u0000\u02c4"+ + "\u02c6\u0003t3\u0000\u02c5\u02c7\u0003L\u001f\u0000\u02c6\u02c5\u0001"+ + "\u0000\u0000\u0000\u02c7\u02c8\u0001\u0000\u0000\u0000\u02c8\u02c6\u0001"+ + "\u0000\u0000\u0000\u02c8\u02c9\u0001\u0000\u0000\u0000\u02c9\u02ca\u0001"+ + "\u0000\u0000\u0000\u02ca\u02cb\u0003T#\u0000\u02cb\u02cd\u0001\u0000\u0000"+ + "\u0000\u02cc\u02a3\u0001\u0000\u0000\u0000\u02cc\u02ae\u0001\u0000\u0000"+ + "\u0000\u02cc\u02b5\u0001\u0000\u0000\u0000\u02cc\u02c4\u0001\u0000\u0000"+ + "\u0000\u02cde\u0001\u0000\u0000\u0000\u02ce\u02cf\u0005b\u0000\u0000\u02cf"+ + "\u02d0\u0005y\u0000\u0000\u02d0g\u0001\u0000\u0000\u0000\u02d1\u02d2\u0005"+ + "a\u0000\u0000\u02d2\u02d3\u0005n\u0000\u0000\u02d3\u02d4\u0005d\u0000"+ + "\u0000\u02d4i\u0001\u0000\u0000\u0000\u02d5\u02d6\u0005a\u0000\u0000\u02d6"+ + "\u02d7\u0005s\u0000\u0000\u02d7\u02d8\u0005c\u0000\u0000\u02d8k\u0001"+ + "\u0000\u0000\u0000\u02d9\u02da\u0005=\u0000\u0000\u02dam\u0001\u0000\u0000"+ + "\u0000\u02db\u02dc\u0005:\u0000\u0000\u02dc\u02dd\u0005:\u0000\u0000\u02dd"+ + "o\u0001\u0000\u0000\u0000\u02de\u02df\u0005,\u0000\u0000\u02dfq\u0001"+ + "\u0000\u0000\u0000\u02e0\u02e1\u0005d\u0000\u0000\u02e1\u02e2\u0005e\u0000"+ + "\u0000\u02e2\u02e3\u0005s\u0000\u0000\u02e3\u02e4\u0005c\u0000\u0000\u02e4"+ + "s\u0001\u0000\u0000\u0000\u02e5\u02e6\u0005.\u0000\u0000\u02e6u\u0001"+ + "\u0000\u0000\u0000\u02e7\u02e8\u0005f\u0000\u0000\u02e8\u02e9\u0005a\u0000"+ + "\u0000\u02e9\u02ea\u0005l\u0000\u0000\u02ea\u02eb\u0005s\u0000\u0000\u02eb"+ + "\u02ec\u0005e\u0000\u0000\u02ecw\u0001\u0000\u0000\u0000\u02ed\u02ee\u0005"+ + 
"f\u0000\u0000\u02ee\u02ef\u0005i\u0000\u0000\u02ef\u02f0\u0005r\u0000"+ + "\u0000\u02f0\u02f1\u0005s\u0000\u0000\u02f1\u02f2\u0005t\u0000\u0000\u02f2"+ + "y\u0001\u0000\u0000\u0000\u02f3\u02f4\u0005l\u0000\u0000\u02f4\u02f5\u0005"+ + "a\u0000\u0000\u02f5\u02f6\u0005s\u0000\u0000\u02f6\u02f7\u0005t\u0000"+ + "\u0000\u02f7{\u0001\u0000\u0000\u0000\u02f8\u02f9\u0005(\u0000\u0000\u02f9"+ + "}\u0001\u0000\u0000\u0000\u02fa\u02fb\u0005i\u0000\u0000\u02fb\u02fc\u0005"+ + "n\u0000\u0000\u02fc\u007f\u0001\u0000\u0000\u0000\u02fd\u02fe\u0005i\u0000"+ + "\u0000\u02fe\u02ff\u0005s\u0000\u0000\u02ff\u0081\u0001\u0000\u0000\u0000"+ + "\u0300\u0301\u0005l\u0000\u0000\u0301\u0302\u0005i\u0000\u0000\u0302\u0303"+ + "\u0005k\u0000\u0000\u0303\u0304\u0005e\u0000\u0000\u0304\u0083\u0001\u0000"+ + "\u0000\u0000\u0305\u0306\u0005n\u0000\u0000\u0306\u0307\u0005o\u0000\u0000"+ + "\u0307\u0308\u0005t\u0000\u0000\u0308\u0085\u0001\u0000\u0000\u0000\u0309"+ + "\u030a\u0005n\u0000\u0000\u030a\u030b\u0005u\u0000\u0000\u030b\u030c\u0005"+ + "l\u0000\u0000\u030c\u030d\u0005l\u0000\u0000\u030d\u0087\u0001\u0000\u0000"+ + "\u0000\u030e\u030f\u0005n\u0000\u0000\u030f\u0310\u0005u\u0000\u0000\u0310"+ + "\u0311\u0005l\u0000\u0000\u0311\u0312\u0005l\u0000\u0000\u0312\u0313\u0005"+ + "s\u0000\u0000\u0313\u0089\u0001\u0000\u0000\u0000\u0314\u0315\u0005o\u0000"+ + "\u0000\u0315\u0316\u0005r\u0000\u0000\u0316\u008b\u0001\u0000\u0000\u0000"+ + "\u0317\u0318\u0005?\u0000\u0000\u0318\u008d\u0001\u0000\u0000\u0000\u0319"+ + "\u031a\u0005r\u0000\u0000\u031a\u031b\u0005l\u0000\u0000\u031b\u031c\u0005"+ + "i\u0000\u0000\u031c\u031d\u0005k\u0000\u0000\u031d\u031e\u0005e\u0000"+ + "\u0000\u031e\u008f\u0001\u0000\u0000\u0000\u031f\u0320\u0005)\u0000\u0000"+ + "\u0320\u0091\u0001\u0000\u0000\u0000\u0321\u0322\u0005t\u0000\u0000\u0322"+ + "\u0323\u0005r\u0000\u0000\u0323\u0324\u0005u\u0000\u0000\u0324\u0325\u0005"+ + "e\u0000\u0000\u0325\u0093\u0001\u0000\u0000\u0000\u0326\u0327\u0005=\u0000"+ + 
"\u0000\u0327\u0328\u0005=\u0000\u0000\u0328\u0095\u0001\u0000\u0000\u0000"+ + "\u0329\u032a\u0005=\u0000\u0000\u032a\u032b\u0005~\u0000\u0000\u032b\u0097"+ + "\u0001\u0000\u0000\u0000\u032c\u032d\u0005!\u0000\u0000\u032d\u032e\u0005"+ + "=\u0000\u0000\u032e\u0099\u0001\u0000\u0000\u0000\u032f\u0330\u0005<\u0000"+ + "\u0000\u0330\u009b\u0001\u0000\u0000\u0000\u0331\u0332\u0005<\u0000\u0000"+ + "\u0332\u0333\u0005=\u0000\u0000\u0333\u009d\u0001\u0000\u0000\u0000\u0334"+ + "\u0335\u0005>\u0000\u0000\u0335\u009f\u0001\u0000\u0000\u0000\u0336\u0337"+ + "\u0005>\u0000\u0000\u0337\u0338\u0005=\u0000\u0000\u0338\u00a1\u0001\u0000"+ + "\u0000\u0000\u0339\u033a\u0005+\u0000\u0000\u033a\u00a3\u0001\u0000\u0000"+ + "\u0000\u033b\u033c\u0005-\u0000\u0000\u033c\u00a5\u0001\u0000\u0000\u0000"+ + "\u033d\u033e\u0005*\u0000\u0000\u033e\u00a7\u0001\u0000\u0000\u0000\u033f"+ + "\u0340\u0005/\u0000\u0000\u0340\u00a9\u0001\u0000\u0000\u0000\u0341\u0342"+ + "\u0005%\u0000\u0000\u0342\u00ab\u0001\u0000\u0000\u0000\u0343\u0344\u0005"+ + "[\u0000\u0000\u0344\u0345\u0001\u0000\u0000\u0000\u0345\u0346\u0006O\u0000"+ + "\u0000\u0346\u0347\u0006O\u0000\u0000\u0347\u00ad\u0001\u0000\u0000\u0000"+ + "\u0348\u0349\u0005]\u0000\u0000\u0349\u034a\u0001\u0000\u0000\u0000\u034a"+ + "\u034b\u0006P\u000e\u0000\u034b\u034c\u0006P\u000e\u0000\u034c\u00af\u0001"+ + "\u0000\u0000\u0000\u034d\u0351\u0003N \u0000\u034e\u0350\u0003^(\u0000"+ + "\u034f\u034e\u0001\u0000\u0000\u0000\u0350\u0353\u0001\u0000\u0000\u0000"+ + "\u0351\u034f\u0001\u0000\u0000\u0000\u0351\u0352\u0001\u0000\u0000\u0000"+ + "\u0352\u035e\u0001\u0000\u0000\u0000\u0353\u0351\u0001\u0000\u0000\u0000"+ + "\u0354\u0357\u0003\\\'\u0000\u0355\u0357\u0003V$\u0000\u0356\u0354\u0001"+ + "\u0000\u0000\u0000\u0356\u0355\u0001\u0000\u0000\u0000\u0357\u0359\u0001"+ + "\u0000\u0000\u0000\u0358\u035a\u0003^(\u0000\u0359\u0358\u0001\u0000\u0000"+ + "\u0000\u035a\u035b\u0001\u0000\u0000\u0000\u035b\u0359\u0001\u0000\u0000"+ + 
"\u0000\u035b\u035c\u0001\u0000\u0000\u0000\u035c\u035e\u0001\u0000\u0000"+ + "\u0000\u035d\u034d\u0001\u0000\u0000\u0000\u035d\u0356\u0001\u0000\u0000"+ + "\u0000\u035e\u00b1\u0001\u0000\u0000\u0000\u035f\u0361\u0003X%\u0000\u0360"+ + "\u0362\u0003Z&\u0000\u0361\u0360\u0001\u0000\u0000\u0000\u0362\u0363\u0001"+ + "\u0000\u0000\u0000\u0363\u0361\u0001\u0000\u0000\u0000\u0363\u0364\u0001"+ + "\u0000\u0000\u0000\u0364\u0365\u0001\u0000\u0000\u0000\u0365\u0366\u0003"+ + "X%\u0000\u0366\u00b3\u0001\u0000\u0000\u0000\u0367\u0368\u0003\u00b2R"+ + "\u0000\u0368\u00b5\u0001\u0000\u0000\u0000\u0369\u036a\u00036\u0014\u0000"+ + "\u036a\u036b\u0001\u0000\u0000\u0000\u036b\u036c\u0006T\n\u0000\u036c"+ + "\u00b7\u0001\u0000\u0000\u0000\u036d\u036e\u00038\u0015\u0000\u036e\u036f"+ + "\u0001\u0000\u0000\u0000\u036f\u0370\u0006U\n\u0000\u0370\u00b9\u0001"+ + "\u0000\u0000\u0000\u0371\u0372\u0003:\u0016\u0000\u0372\u0373\u0001\u0000"+ + "\u0000\u0000\u0373\u0374\u0006V\n\u0000\u0374\u00bb\u0001\u0000\u0000"+ + "\u0000\u0375\u0376\u0003J\u001e\u0000\u0376\u0377\u0001\u0000\u0000\u0000"+ + "\u0377\u0378\u0006W\r\u0000\u0378\u0379\u0006W\u000e\u0000\u0379\u00bd"+ + "\u0001\u0000\u0000\u0000\u037a\u037b\u0003\u00acO\u0000\u037b\u037c\u0001"+ + "\u0000\u0000\u0000\u037c\u037d\u0006X\u000b\u0000\u037d\u00bf\u0001\u0000"+ + "\u0000\u0000\u037e\u037f\u0003\u00aeP\u0000\u037f\u0380\u0001\u0000\u0000"+ + "\u0000\u0380\u0381\u0006Y\u000f\u0000\u0381\u00c1\u0001\u0000\u0000\u0000"+ + "\u0382\u0383\u0003p1\u0000\u0383\u0384\u0001\u0000\u0000\u0000\u0384\u0385"+ + "\u0006Z\u0010\u0000\u0385\u00c3\u0001\u0000\u0000\u0000\u0386\u0387\u0003"+ + "l/\u0000\u0387\u0388\u0001\u0000\u0000\u0000\u0388\u0389\u0006[\u0011"+ + "\u0000\u0389\u00c5\u0001\u0000\u0000\u0000\u038a\u038b\u0003`)\u0000\u038b"+ + "\u038c\u0001\u0000\u0000\u0000\u038c\u038d\u0006\\\u0012\u0000\u038d\u00c7"+ + "\u0001\u0000\u0000\u0000\u038e\u038f\u0005o\u0000\u0000\u038f\u0390\u0005"+ + 
"p\u0000\u0000\u0390\u0391\u0005t\u0000\u0000\u0391\u0392\u0005i\u0000"+ + "\u0000\u0392\u0393\u0005o\u0000\u0000\u0393\u0394\u0005n\u0000\u0000\u0394"+ + "\u0395\u0005s\u0000\u0000\u0395\u00c9\u0001\u0000\u0000\u0000\u0396\u0397"+ + "\u0005m\u0000\u0000\u0397\u0398\u0005e\u0000\u0000\u0398\u0399\u0005t"+ + "\u0000\u0000\u0399\u039a\u0005a\u0000\u0000\u039a\u039b\u0005d\u0000\u0000"+ + "\u039b\u039c\u0005a\u0000\u0000\u039c\u039d\u0005t\u0000\u0000\u039d\u039e"+ + "\u0005a\u0000\u0000\u039e\u00cb\u0001\u0000\u0000\u0000\u039f\u03a0\u0003"+ + ">\u0018\u0000\u03a0\u03a1\u0001\u0000\u0000\u0000\u03a1\u03a2\u0006_\u0013"+ + "\u0000\u03a2\u00cd\u0001\u0000\u0000\u0000\u03a3\u03a4\u00036\u0014\u0000"+ + "\u03a4\u03a5\u0001\u0000\u0000\u0000\u03a5\u03a6\u0006`\n\u0000\u03a6"+ + "\u00cf\u0001\u0000\u0000\u0000\u03a7\u03a8\u00038\u0015\u0000\u03a8\u03a9"+ + "\u0001\u0000\u0000\u0000\u03a9\u03aa\u0006a\n\u0000\u03aa\u00d1\u0001"+ + "\u0000\u0000\u0000\u03ab\u03ac\u0003:\u0016\u0000\u03ac\u03ad\u0001\u0000"+ + "\u0000\u0000\u03ad\u03ae\u0006b\n\u0000\u03ae\u00d3\u0001\u0000\u0000"+ + "\u0000\u03af\u03b0\u0003J\u001e\u0000\u03b0\u03b1\u0001\u0000\u0000\u0000"+ + "\u03b1\u03b2\u0006c\r\u0000\u03b2\u03b3\u0006c\u000e\u0000\u03b3\u00d5"+ + "\u0001\u0000\u0000\u0000\u03b4\u03b5\u0003t3\u0000\u03b5\u03b6\u0001\u0000"+ + "\u0000\u0000\u03b6\u03b7\u0006d\u0014\u0000\u03b7\u00d7\u0001\u0000\u0000"+ + "\u0000\u03b8\u03b9\u0003p1\u0000\u03b9\u03ba\u0001\u0000\u0000\u0000\u03ba"+ + "\u03bb\u0006e\u0010\u0000\u03bb\u00d9\u0001\u0000\u0000\u0000\u03bc\u03c1"+ + "\u0003N \u0000\u03bd\u03c1\u0003L\u001f\u0000\u03be\u03c1\u0003\\\'\u0000"+ + "\u03bf\u03c1\u0003\u00a6L\u0000\u03c0\u03bc\u0001\u0000\u0000\u0000\u03c0"+ + "\u03bd\u0001\u0000\u0000\u0000\u03c0\u03be\u0001\u0000\u0000\u0000\u03c0"+ + "\u03bf\u0001\u0000\u0000\u0000\u03c1\u00db\u0001\u0000\u0000\u0000\u03c2"+ + "\u03c5\u0003N \u0000\u03c3\u03c5\u0003\u00a6L\u0000\u03c4\u03c2\u0001"+ + 
"\u0000\u0000\u0000\u03c4\u03c3\u0001\u0000\u0000\u0000\u03c5\u03c9\u0001"+ + "\u0000\u0000\u0000\u03c6\u03c8\u0003\u00daf\u0000\u03c7\u03c6\u0001\u0000"+ + "\u0000\u0000\u03c8\u03cb\u0001\u0000\u0000\u0000\u03c9\u03c7\u0001\u0000"+ + "\u0000\u0000\u03c9\u03ca\u0001\u0000\u0000\u0000\u03ca\u03d6\u0001\u0000"+ + "\u0000\u0000\u03cb\u03c9\u0001\u0000\u0000\u0000\u03cc\u03cf\u0003\\\'"+ + "\u0000\u03cd\u03cf\u0003V$\u0000\u03ce\u03cc\u0001\u0000\u0000\u0000\u03ce"+ + "\u03cd\u0001\u0000\u0000\u0000\u03cf\u03d1\u0001\u0000\u0000\u0000\u03d0"+ + "\u03d2\u0003\u00daf\u0000\u03d1\u03d0\u0001\u0000\u0000\u0000\u03d2\u03d3"+ + "\u0001\u0000\u0000\u0000\u03d3\u03d1\u0001\u0000\u0000\u0000\u03d3\u03d4"+ + "\u0001\u0000\u0000\u0000\u03d4\u03d6\u0001\u0000\u0000\u0000\u03d5\u03c4"+ + "\u0001\u0000\u0000\u0000\u03d5\u03ce\u0001\u0000\u0000\u0000\u03d6\u00dd"+ + "\u0001\u0000\u0000\u0000\u03d7\u03da\u0003\u00dcg\u0000\u03d8\u03da\u0003"+ + "\u00b2R\u0000\u03d9\u03d7\u0001\u0000\u0000\u0000\u03d9\u03d8\u0001\u0000"+ + "\u0000\u0000\u03da\u03db\u0001\u0000\u0000\u0000\u03db\u03d9\u0001\u0000"+ + "\u0000\u0000\u03db\u03dc\u0001\u0000\u0000\u0000\u03dc\u00df\u0001\u0000"+ + "\u0000\u0000\u03dd\u03de\u00036\u0014\u0000\u03de\u03df\u0001\u0000\u0000"+ + "\u0000\u03df\u03e0\u0006i\n\u0000\u03e0\u00e1\u0001\u0000\u0000\u0000"+ + "\u03e1\u03e2\u00038\u0015\u0000\u03e2\u03e3\u0001\u0000\u0000\u0000\u03e3"+ + "\u03e4\u0006j\n\u0000\u03e4\u00e3\u0001\u0000\u0000\u0000\u03e5\u03e6"+ + "\u0003:\u0016\u0000\u03e6\u03e7\u0001\u0000\u0000\u0000\u03e7\u03e8\u0006"+ + "k\n\u0000\u03e8\u00e5\u0001\u0000\u0000\u0000\u03e9\u03ea\u0003J\u001e"+ + "\u0000\u03ea\u03eb\u0001\u0000\u0000\u0000\u03eb\u03ec\u0006l\r\u0000"+ + "\u03ec\u03ed\u0006l\u000e\u0000\u03ed\u00e7\u0001\u0000\u0000\u0000\u03ee"+ + "\u03ef\u0003l/\u0000\u03ef\u03f0\u0001\u0000\u0000\u0000\u03f0\u03f1\u0006"+ + "m\u0011\u0000\u03f1\u00e9\u0001\u0000\u0000\u0000\u03f2\u03f3\u0003p1"+ + 
"\u0000\u03f3\u03f4\u0001\u0000\u0000\u0000\u03f4\u03f5\u0006n\u0010\u0000"+ + "\u03f5\u00eb\u0001\u0000\u0000\u0000\u03f6\u03f7\u0003t3\u0000\u03f7\u03f8"+ + "\u0001\u0000\u0000\u0000\u03f8\u03f9\u0006o\u0014\u0000\u03f9\u00ed\u0001"+ + "\u0000\u0000\u0000\u03fa\u03fb\u0005a\u0000\u0000\u03fb\u03fc\u0005s\u0000"+ + "\u0000\u03fc\u00ef\u0001\u0000\u0000\u0000\u03fd\u03fe\u0003\u00deh\u0000"+ + "\u03fe\u03ff\u0001\u0000\u0000\u0000\u03ff\u0400\u0006q\u0015\u0000\u0400"+ + "\u00f1\u0001\u0000\u0000\u0000\u0401\u0402\u00036\u0014\u0000\u0402\u0403"+ + "\u0001\u0000\u0000\u0000\u0403\u0404\u0006r\n\u0000\u0404\u00f3\u0001"+ + "\u0000\u0000\u0000\u0405\u0406\u00038\u0015\u0000\u0406\u0407\u0001\u0000"+ + "\u0000\u0000\u0407\u0408\u0006s\n\u0000\u0408\u00f5\u0001\u0000\u0000"+ + "\u0000\u0409\u040a\u0003:\u0016\u0000\u040a\u040b\u0001\u0000\u0000\u0000"+ + "\u040b\u040c\u0006t\n\u0000\u040c\u00f7\u0001\u0000\u0000\u0000\u040d"+ + "\u040e\u0003J\u001e\u0000\u040e\u040f\u0001\u0000\u0000\u0000\u040f\u0410"+ + "\u0006u\r\u0000\u0410\u0411\u0006u\u000e\u0000\u0411\u00f9\u0001\u0000"+ + "\u0000\u0000\u0412\u0413\u0003\u00acO\u0000\u0413\u0414\u0001\u0000\u0000"+ + "\u0000\u0414\u0415\u0006v\u000b\u0000\u0415\u0416\u0006v\u0016\u0000\u0416"+ + "\u00fb\u0001\u0000\u0000\u0000\u0417\u0418\u0005o\u0000\u0000\u0418\u0419"+ + "\u0005n\u0000\u0000\u0419\u041a\u0001\u0000\u0000\u0000\u041a\u041b\u0006"+ + "w\u0017\u0000\u041b\u00fd\u0001\u0000\u0000\u0000\u041c\u041d\u0005w\u0000"+ + "\u0000\u041d\u041e\u0005i\u0000\u0000\u041e\u041f\u0005t\u0000\u0000\u041f"+ + "\u0420\u0005h\u0000\u0000\u0420\u0421\u0001\u0000\u0000\u0000\u0421\u0422"+ + "\u0006x\u0017\u0000\u0422\u00ff\u0001\u0000\u0000\u0000\u0423\u0424\b"+ + "\f\u0000\u0000\u0424\u0101\u0001\u0000\u0000\u0000\u0425\u0427\u0003\u0100"+ + "y\u0000\u0426\u0425\u0001\u0000\u0000\u0000\u0427\u0428\u0001\u0000\u0000"+ + "\u0000\u0428\u0426\u0001\u0000\u0000\u0000\u0428\u0429\u0001\u0000\u0000"+ + 
"\u0000\u0429\u042a\u0001\u0000\u0000\u0000\u042a\u042b\u0003\u0146\u009c"+ + "\u0000\u042b\u042d\u0001\u0000\u0000\u0000\u042c\u0426\u0001\u0000\u0000"+ + "\u0000\u042c\u042d\u0001\u0000\u0000\u0000\u042d\u042f\u0001\u0000\u0000"+ + "\u0000\u042e\u0430\u0003\u0100y\u0000\u042f\u042e\u0001\u0000\u0000\u0000"+ + "\u0430\u0431\u0001\u0000\u0000\u0000\u0431\u042f\u0001\u0000\u0000\u0000"+ + "\u0431\u0432\u0001\u0000\u0000\u0000\u0432\u0103\u0001\u0000\u0000\u0000"+ + "\u0433\u0434\u0003\u00b4S\u0000\u0434\u0435\u0001\u0000\u0000\u0000\u0435"+ + "\u0436\u0006{\u0018\u0000\u0436\u0105\u0001\u0000\u0000\u0000\u0437\u0438"+ + "\u0003\u0102z\u0000\u0438\u0439\u0001\u0000\u0000\u0000\u0439\u043a\u0006"+ + "|\u0019\u0000\u043a\u0107\u0001\u0000\u0000\u0000\u043b\u043c\u00036\u0014"+ + "\u0000\u043c\u043d\u0001\u0000\u0000\u0000\u043d\u043e\u0006}\n\u0000"+ + "\u043e\u0109\u0001\u0000\u0000\u0000\u043f\u0440\u00038\u0015\u0000\u0440"+ + "\u0441\u0001\u0000\u0000\u0000\u0441\u0442\u0006~\n\u0000\u0442\u010b"+ + "\u0001\u0000\u0000\u0000\u0443\u0444\u0003:\u0016\u0000\u0444\u0445\u0001"+ + "\u0000\u0000\u0000\u0445\u0446\u0006\u007f\n\u0000\u0446\u010d\u0001\u0000"+ + "\u0000\u0000\u0447\u0448\u0003J\u001e\u0000\u0448\u0449\u0001\u0000\u0000"+ + "\u0000\u0449\u044a\u0006\u0080\r\u0000\u044a\u044b\u0006\u0080\u000e\u0000"+ + "\u044b\u044c\u0006\u0080\u000e\u0000\u044c\u010f\u0001\u0000\u0000\u0000"+ + "\u044d\u044e\u0003l/\u0000\u044e\u044f\u0001\u0000\u0000\u0000\u044f\u0450"+ + "\u0006\u0081\u0011\u0000\u0450\u0111\u0001\u0000\u0000\u0000\u0451\u0452"+ + "\u0003p1\u0000\u0452\u0453\u0001\u0000\u0000\u0000\u0453\u0454\u0006\u0082"+ + "\u0010\u0000\u0454\u0113\u0001\u0000\u0000\u0000\u0455\u0456\u0003t3\u0000"+ + "\u0456\u0457\u0001\u0000\u0000\u0000\u0457\u0458\u0006\u0083\u0014\u0000"+ + "\u0458\u0115\u0001\u0000\u0000\u0000\u0459\u045a\u0003\u00fex\u0000\u045a"+ + "\u045b\u0001\u0000\u0000\u0000\u045b\u045c\u0006\u0084\u001a\u0000\u045c"+ + 
"\u0117\u0001\u0000\u0000\u0000\u045d\u045e\u0003\u00deh\u0000\u045e\u045f"+ + "\u0001\u0000\u0000\u0000\u045f\u0460\u0006\u0085\u0015\u0000\u0460\u0119"+ + "\u0001\u0000\u0000\u0000\u0461\u0462\u0003\u00b4S\u0000\u0462\u0463\u0001"+ + "\u0000\u0000\u0000\u0463\u0464\u0006\u0086\u0018\u0000\u0464\u011b\u0001"+ + "\u0000\u0000\u0000\u0465\u0466\u00036\u0014\u0000\u0466\u0467\u0001\u0000"+ + "\u0000\u0000\u0467\u0468\u0006\u0087\n\u0000\u0468\u011d\u0001\u0000\u0000"+ + "\u0000\u0469\u046a\u00038\u0015\u0000\u046a\u046b\u0001\u0000\u0000\u0000"+ + "\u046b\u046c\u0006\u0088\n\u0000\u046c\u011f\u0001\u0000\u0000\u0000\u046d"+ + "\u046e\u0003:\u0016\u0000\u046e\u046f\u0001\u0000\u0000\u0000\u046f\u0470"+ + "\u0006\u0089\n\u0000\u0470\u0121\u0001\u0000\u0000\u0000\u0471\u0472\u0003"+ + "J\u001e\u0000\u0472\u0473\u0001\u0000\u0000\u0000\u0473\u0474\u0006\u008a"+ + "\r\u0000\u0474\u0475\u0006\u008a\u000e\u0000\u0475\u0123\u0001\u0000\u0000"+ + "\u0000\u0476\u0477\u0003t3\u0000\u0477\u0478\u0001\u0000\u0000\u0000\u0478"+ + "\u0479\u0006\u008b\u0014\u0000\u0479\u0125\u0001\u0000\u0000\u0000\u047a"+ + "\u047b\u0003\u00b4S\u0000\u047b\u047c\u0001\u0000\u0000\u0000\u047c\u047d"+ + "\u0006\u008c\u0018\u0000\u047d\u0127\u0001\u0000\u0000\u0000\u047e\u047f"+ + "\u0003\u00b0Q\u0000\u047f\u0480\u0001\u0000\u0000\u0000\u0480\u0481\u0006"+ + "\u008d\u001b\u0000\u0481\u0129\u0001\u0000\u0000\u0000\u0482\u0483\u0003"+ + "6\u0014\u0000\u0483\u0484\u0001\u0000\u0000\u0000\u0484\u0485\u0006\u008e"+ + "\n\u0000\u0485\u012b\u0001\u0000\u0000\u0000\u0486\u0487\u00038\u0015"+ + "\u0000\u0487\u0488\u0001\u0000\u0000\u0000\u0488\u0489\u0006\u008f\n\u0000"+ + "\u0489\u012d\u0001\u0000\u0000\u0000\u048a\u048b\u0003:\u0016\u0000\u048b"+ + "\u048c\u0001\u0000\u0000\u0000\u048c\u048d\u0006\u0090\n\u0000\u048d\u012f"+ + "\u0001\u0000\u0000\u0000\u048e\u048f\u0003J\u001e\u0000\u048f\u0490\u0001"+ + "\u0000\u0000\u0000\u0490\u0491\u0006\u0091\r\u0000\u0491\u0492\u0006\u0091"+ + 
"\u000e\u0000\u0492\u0131\u0001\u0000\u0000\u0000\u0493\u0494\u0005i\u0000"+ + "\u0000\u0494\u0495\u0005n\u0000\u0000\u0495\u0496\u0005f\u0000\u0000\u0496"+ + "\u0497\u0005o\u0000\u0000\u0497\u0133\u0001\u0000\u0000\u0000\u0498\u0499"+ + "\u00036\u0014\u0000\u0499\u049a\u0001\u0000\u0000\u0000\u049a\u049b\u0006"+ + "\u0093\n\u0000\u049b\u0135\u0001\u0000\u0000\u0000\u049c\u049d\u00038"+ + "\u0015\u0000\u049d\u049e\u0001\u0000\u0000\u0000\u049e\u049f\u0006\u0094"+ + "\n\u0000\u049f\u0137\u0001\u0000\u0000\u0000\u04a0\u04a1\u0003:\u0016"+ + "\u0000\u04a1\u04a2\u0001\u0000\u0000\u0000\u04a2\u04a3\u0006\u0095\n\u0000"+ + "\u04a3\u0139\u0001\u0000\u0000\u0000\u04a4\u04a5\u0003J\u001e\u0000\u04a5"+ + "\u04a6\u0001\u0000\u0000\u0000\u04a6\u04a7\u0006\u0096\r\u0000\u04a7\u04a8"+ + "\u0006\u0096\u000e\u0000\u04a8\u013b\u0001\u0000\u0000\u0000\u04a9\u04aa"+ + "\u0005f\u0000\u0000\u04aa\u04ab\u0005u\u0000\u0000\u04ab\u04ac\u0005n"+ + "\u0000\u0000\u04ac\u04ad\u0005c\u0000\u0000\u04ad\u04ae\u0005t\u0000\u0000"+ + "\u04ae\u04af\u0005i\u0000\u0000\u04af\u04b0\u0005o\u0000\u0000\u04b0\u04b1"+ + "\u0005n\u0000\u0000\u04b1\u04b2\u0005s\u0000\u0000\u04b2\u013d\u0001\u0000"+ + "\u0000\u0000\u04b3\u04b4\u00036\u0014\u0000\u04b4\u04b5\u0001\u0000\u0000"+ + "\u0000\u04b5\u04b6\u0006\u0098\n\u0000\u04b6\u013f\u0001\u0000\u0000\u0000"+ + "\u04b7\u04b8\u00038\u0015\u0000\u04b8\u04b9\u0001\u0000\u0000\u0000\u04b9"+ + "\u04ba\u0006\u0099\n\u0000\u04ba\u0141\u0001\u0000\u0000\u0000\u04bb\u04bc"+ + "\u0003:\u0016\u0000\u04bc\u04bd\u0001\u0000\u0000\u0000\u04bd\u04be\u0006"+ + "\u009a\n\u0000\u04be\u0143\u0001\u0000\u0000\u0000\u04bf\u04c0\u0003\u00ae"+ + "P\u0000\u04c0\u04c1\u0001\u0000\u0000\u0000\u04c1\u04c2\u0006\u009b\u000f"+ + "\u0000\u04c2\u04c3\u0006\u009b\u000e\u0000\u04c3\u0145\u0001\u0000\u0000"+ + "\u0000\u04c4\u04c5\u0005:\u0000\u0000\u04c5\u0147\u0001\u0000\u0000\u0000"+ + "\u04c6\u04cc\u0003V$\u0000\u04c7\u04cc\u0003L\u001f\u0000\u04c8\u04cc"+ + "\u0003t3\u0000\u04c9\u04cc\u0003N 
\u0000\u04ca\u04cc\u0003\\\'\u0000\u04cb"+ + "\u04c6\u0001\u0000\u0000\u0000\u04cb\u04c7\u0001\u0000\u0000\u0000\u04cb"+ + "\u04c8\u0001\u0000\u0000\u0000\u04cb\u04c9\u0001\u0000\u0000\u0000\u04cb"+ + "\u04ca\u0001\u0000\u0000\u0000\u04cc\u04cd\u0001\u0000\u0000\u0000\u04cd"+ + "\u04cb\u0001\u0000\u0000\u0000\u04cd\u04ce\u0001\u0000\u0000\u0000\u04ce"+ + "\u0149\u0001\u0000\u0000\u0000\u04cf\u04d0\u00036\u0014\u0000\u04d0\u04d1"+ + "\u0001\u0000\u0000\u0000\u04d1\u04d2\u0006\u009e\n\u0000\u04d2\u014b\u0001"+ + "\u0000\u0000\u0000\u04d3\u04d4\u00038\u0015\u0000\u04d4\u04d5\u0001\u0000"+ + "\u0000\u0000\u04d5\u04d6\u0006\u009f\n\u0000\u04d6\u014d\u0001\u0000\u0000"+ + "\u0000\u04d7\u04d8\u0003:\u0016\u0000\u04d8\u04d9\u0001\u0000\u0000\u0000"+ + "\u04d9\u04da\u0006\u00a0\n\u0000\u04da\u014f\u0001\u0000\u0000\u0000\u04db"+ + "\u04dc\u0003J\u001e\u0000\u04dc\u04dd\u0001\u0000\u0000\u0000\u04dd\u04de"+ + "\u0006\u00a1\r\u0000\u04de\u04df\u0006\u00a1\u000e\u0000\u04df\u0151\u0001"+ + "\u0000\u0000\u0000\u04e0\u04e1\u0003>\u0018\u0000\u04e1\u04e2\u0001\u0000"+ + "\u0000\u0000\u04e2\u04e3\u0006\u00a2\u0013\u0000\u04e3\u04e4\u0006\u00a2"+ + "\u000e\u0000\u04e4\u04e5\u0006\u00a2\u001c\u0000\u04e5\u0153\u0001\u0000"+ + "\u0000\u0000\u04e6\u04e7\u00036\u0014\u0000\u04e7\u04e8\u0001\u0000\u0000"+ + "\u0000\u04e8\u04e9\u0006\u00a3\n\u0000\u04e9\u0155\u0001\u0000\u0000\u0000"+ + "\u04ea\u04eb\u00038\u0015\u0000\u04eb\u04ec\u0001\u0000\u0000\u0000\u04ec"+ + "\u04ed\u0006\u00a4\n\u0000\u04ed\u0157\u0001\u0000\u0000\u0000\u04ee\u04ef"+ + "\u0003:\u0016\u0000\u04ef\u04f0\u0001\u0000\u0000\u0000\u04f0\u04f1\u0006"+ + "\u00a5\n\u0000\u04f1\u0159\u0001\u0000\u0000\u0000\u04f2\u04f3\u0003p"+ + "1\u0000\u04f3\u04f4\u0001\u0000\u0000\u0000\u04f4\u04f5\u0006\u00a6\u0010"+ + "\u0000\u04f5\u04f6\u0006\u00a6\u000e\u0000\u04f6\u04f7\u0006\u00a6\u0006"+ + "\u0000\u04f7\u015b\u0001\u0000\u0000\u0000\u04f8\u04f9\u00036\u0014\u0000"+ + 
"\u04f9\u04fa\u0001\u0000\u0000\u0000\u04fa\u04fb\u0006\u00a7\n\u0000\u04fb"+ + "\u015d\u0001\u0000\u0000\u0000\u04fc\u04fd\u00038\u0015\u0000\u04fd\u04fe"+ + "\u0001\u0000\u0000\u0000\u04fe\u04ff\u0006\u00a8\n\u0000\u04ff\u015f\u0001"+ + "\u0000\u0000\u0000\u0500\u0501\u0003:\u0016\u0000\u0501\u0502\u0001\u0000"+ + "\u0000\u0000\u0502\u0503\u0006\u00a9\n\u0000\u0503\u0161\u0001\u0000\u0000"+ + "\u0000\u0504\u0505\u0003\u00b4S\u0000\u0505\u0506\u0001\u0000\u0000\u0000"+ + "\u0506\u0507\u0006\u00aa\u000e\u0000\u0507\u0508\u0006\u00aa\u0000\u0000"+ + "\u0508\u0509\u0006\u00aa\u0018\u0000\u0509\u0163\u0001\u0000\u0000\u0000"+ + "\u050a\u050b\u0003\u00b0Q\u0000\u050b\u050c\u0001\u0000\u0000\u0000\u050c"+ + "\u050d\u0006\u00ab\u000e\u0000\u050d\u050e\u0006\u00ab\u0000\u0000\u050e"+ + "\u050f\u0006\u00ab\u001b\u0000\u050f\u0165\u0001\u0000\u0000\u0000\u0510"+ + "\u0511\u0003f,\u0000\u0511\u0512\u0001\u0000\u0000\u0000\u0512\u0513\u0006"+ + "\u00ac\u000e\u0000\u0513\u0514\u0006\u00ac\u0000\u0000\u0514\u0515\u0006"+ + "\u00ac\u001d\u0000\u0515\u0167\u0001\u0000\u0000\u0000\u0516\u0517\u0003"+ + "J\u001e\u0000\u0517\u0518\u0001\u0000\u0000\u0000\u0518\u0519\u0006\u00ad"+ + "\r\u0000\u0519\u051a\u0006\u00ad\u000e\u0000\u051a\u0169\u0001\u0000\u0000"+ + "\u0000<\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f"+ + "\r\u020d\u0217\u021b\u021e\u0227\u0229\u0234\u023b\u0240\u0267\u026c\u0275"+ + "\u027c\u0281\u0283\u028e\u0296\u0299\u029b\u02a0\u02a5\u02ab\u02b2\u02b7"+ + "\u02bd\u02c0\u02c8\u02cc\u0351\u0356\u035b\u035d\u0363\u03c0\u03c4\u03c9"+ + "\u03ce\u03d3\u03d5\u03d9\u03db\u0428\u042c\u0431\u04cb\u04cd\u001e\u0005"+ + "\u0002\u0000\u0005\u0004\u0000\u0005\u0006\u0000\u0005\u0001\u0000\u0005"+ + "\u0003\u0000\u0005\n\u0000\u0005\f\u0000\u0005\b\u0000\u0005\u0005\u0000"+ + "\u0005\t\u0000\u0000\u0001\u0000\u0007C\u0000\u0005\u0000\u0000\u0007"+ + "\u001c\u0000\u0004\u0000\u0000\u0007D\u0000\u0007%\u0000\u0007#\u0000"+ + 
"\u0007\u001d\u0000\u0007\u0018\u0000\u0007\'\u0000\u0007O\u0000\u0005"+ + "\u000b\u0000\u0005\u0007\u0000\u0007F\u0000\u0007Y\u0000\u0007X\u0000"+ + "\u0007E\u0000\u0005\r\u0000\u0007 \u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index b4a8e60dd69aa..461605d5f0231 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -11,6 +11,7 @@ null 'keep' 'limit' 'meta' +'metrics' 'mv_expand' 'rename' 'row' @@ -25,6 +26,7 @@ null null null null +null '|' null null @@ -80,7 +82,6 @@ null null null null -null 'as' null null @@ -110,6 +111,12 @@ null null null null +null +null +null +null +null +null token symbolic names: null @@ -124,6 +131,7 @@ INLINESTATS KEEP LIMIT META +METRICS MV_EXPAND RENAME ROW @@ -135,6 +143,7 @@ UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT WS +INDEX_UNQUOTED_IDENTIFIER EXPLAIN_WS EXPLAIN_LINE_COMMENT EXPLAIN_MULTILINE_COMMENT @@ -186,7 +195,6 @@ EXPR_MULTILINE_COMMENT EXPR_WS OPTIONS METADATA -FROM_UNQUOTED_IDENTIFIER FROM_LINE_COMMENT FROM_MULTILINE_COMMENT FROM_WS @@ -223,6 +231,12 @@ SETTING SETTING_LINE_COMMENT SETTTING_MULTILINE_COMMENT SETTING_WS +METRICS_LINE_COMMENT +METRICS_MULTILINE_COMMENT +METRICS_WS +CLOSING_METRICS_LINE_COMMENT +CLOSING_METRICS_MULTILINE_COMMENT +CLOSING_METRICS_WS rule names: singleStatement @@ -241,12 +255,13 @@ rowCommand fields field fromCommand -fromIdentifier +indexIdentifier fromOptions configOption metadata metadataOption deprecated_metadata +metricsCommand evalCommand statsCommand inlinestatsCommand @@ -282,4 +297,4 @@ enrichWithClause atn: -[4, 1, 110, 543, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 
2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 118, 8, 1, 10, 1, 12, 1, 121, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 128, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 143, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 155, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 162, 8, 5, 10, 5, 12, 5, 165, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 172, 8, 5, 1, 5, 1, 5, 3, 5, 176, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 184, 8, 5, 10, 5, 12, 5, 187, 9, 5, 1, 6, 1, 6, 3, 6, 191, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 198, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 203, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 210, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 216, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 224, 8, 8, 10, 8, 12, 8, 227, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 237, 8, 9, 1, 9, 1, 9, 1, 9, 5, 9, 242, 8, 9, 10, 9, 12, 9, 245, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 253, 8, 10, 10, 10, 12, 10, 256, 9, 10, 3, 10, 258, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 5, 13, 270, 8, 13, 10, 13, 12, 13, 273, 9, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 3, 14, 280, 8, 14, 1, 15, 1, 15, 1, 15, 1, 15, 5, 15, 286, 8, 15, 10, 15, 12, 15, 289, 9, 15, 1, 15, 3, 15, 
292, 8, 15, 1, 15, 3, 15, 295, 8, 15, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 303, 8, 17, 10, 17, 12, 17, 306, 9, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 3, 19, 314, 8, 19, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 320, 8, 20, 10, 20, 12, 20, 323, 9, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 3, 23, 334, 8, 23, 1, 23, 1, 23, 3, 23, 338, 8, 23, 1, 24, 1, 24, 1, 24, 1, 24, 3, 24, 344, 8, 24, 1, 25, 1, 25, 1, 25, 5, 25, 349, 8, 25, 10, 25, 12, 25, 352, 9, 25, 1, 26, 1, 26, 1, 26, 5, 26, 357, 8, 26, 10, 26, 12, 26, 360, 9, 26, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 379, 8, 29, 10, 29, 12, 29, 382, 9, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 390, 8, 29, 10, 29, 12, 29, 393, 9, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 401, 8, 29, 10, 29, 12, 29, 404, 9, 29, 1, 29, 1, 29, 3, 29, 408, 8, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 417, 8, 31, 10, 31, 12, 31, 420, 9, 31, 1, 32, 1, 32, 3, 32, 424, 8, 32, 1, 32, 1, 32, 3, 32, 428, 8, 32, 1, 33, 1, 33, 1, 33, 1, 33, 5, 33, 434, 8, 33, 10, 33, 12, 33, 437, 9, 33, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 443, 8, 34, 10, 34, 12, 34, 446, 9, 34, 1, 35, 1, 35, 1, 35, 1, 35, 5, 35, 452, 8, 35, 10, 35, 12, 35, 455, 9, 35, 1, 36, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 465, 8, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 5, 40, 477, 8, 40, 10, 40, 12, 40, 480, 9, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 43, 1, 43, 3, 43, 490, 8, 43, 1, 44, 3, 44, 493, 8, 44, 1, 44, 1, 44, 1, 45, 3, 45, 498, 8, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 3, 52, 523, 8, 52, 1, 52, 1, 52, 1, 52, 1, 52, 5, 52, 529, 8, 52, 10, 52, 12, 52, 532, 9, 52, 3, 52, 534, 8, 52, 1, 53, 1, 53, 1, 53, 3, 53, 539, 8, 53, 1, 53, 1, 53, 1, 53, 0, 4, 
2, 10, 16, 18, 54, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 0, 8, 1, 0, 60, 61, 1, 0, 62, 64, 2, 0, 68, 68, 74, 74, 1, 0, 67, 68, 2, 0, 32, 32, 36, 36, 1, 0, 39, 40, 2, 0, 38, 38, 52, 52, 2, 0, 53, 53, 55, 59, 568, 0, 108, 1, 0, 0, 0, 2, 111, 1, 0, 0, 0, 4, 127, 1, 0, 0, 0, 6, 142, 1, 0, 0, 0, 8, 144, 1, 0, 0, 0, 10, 175, 1, 0, 0, 0, 12, 202, 1, 0, 0, 0, 14, 209, 1, 0, 0, 0, 16, 215, 1, 0, 0, 0, 18, 236, 1, 0, 0, 0, 20, 246, 1, 0, 0, 0, 22, 261, 1, 0, 0, 0, 24, 263, 1, 0, 0, 0, 26, 266, 1, 0, 0, 0, 28, 279, 1, 0, 0, 0, 30, 281, 1, 0, 0, 0, 32, 296, 1, 0, 0, 0, 34, 298, 1, 0, 0, 0, 36, 307, 1, 0, 0, 0, 38, 313, 1, 0, 0, 0, 40, 315, 1, 0, 0, 0, 42, 324, 1, 0, 0, 0, 44, 328, 1, 0, 0, 0, 46, 331, 1, 0, 0, 0, 48, 339, 1, 0, 0, 0, 50, 345, 1, 0, 0, 0, 52, 353, 1, 0, 0, 0, 54, 361, 1, 0, 0, 0, 56, 363, 1, 0, 0, 0, 58, 407, 1, 0, 0, 0, 60, 409, 1, 0, 0, 0, 62, 412, 1, 0, 0, 0, 64, 421, 1, 0, 0, 0, 66, 429, 1, 0, 0, 0, 68, 438, 1, 0, 0, 0, 70, 447, 1, 0, 0, 0, 72, 456, 1, 0, 0, 0, 74, 460, 1, 0, 0, 0, 76, 466, 1, 0, 0, 0, 78, 470, 1, 0, 0, 0, 80, 473, 1, 0, 0, 0, 82, 481, 1, 0, 0, 0, 84, 485, 1, 0, 0, 0, 86, 489, 1, 0, 0, 0, 88, 492, 1, 0, 0, 0, 90, 497, 1, 0, 0, 0, 92, 501, 1, 0, 0, 0, 94, 503, 1, 0, 0, 0, 96, 505, 1, 0, 0, 0, 98, 508, 1, 0, 0, 0, 100, 512, 1, 0, 0, 0, 102, 515, 1, 0, 0, 0, 104, 518, 1, 0, 0, 0, 106, 538, 1, 0, 0, 0, 108, 109, 3, 2, 1, 0, 109, 110, 5, 0, 0, 1, 110, 1, 1, 0, 0, 0, 111, 112, 6, 1, -1, 0, 112, 113, 3, 4, 2, 0, 113, 119, 1, 0, 0, 0, 114, 115, 10, 1, 0, 0, 115, 116, 5, 26, 0, 0, 116, 118, 3, 6, 3, 0, 117, 114, 1, 0, 0, 0, 118, 121, 1, 0, 0, 0, 119, 117, 1, 0, 0, 0, 119, 120, 1, 0, 0, 0, 120, 3, 1, 0, 0, 0, 121, 119, 1, 0, 0, 0, 122, 128, 3, 96, 48, 0, 123, 128, 3, 30, 15, 0, 124, 128, 3, 24, 12, 0, 125, 128, 3, 100, 50, 0, 126, 128, 3, 102, 51, 0, 127, 122, 1, 0, 0, 0, 127, 
123, 1, 0, 0, 0, 127, 124, 1, 0, 0, 0, 127, 125, 1, 0, 0, 0, 127, 126, 1, 0, 0, 0, 128, 5, 1, 0, 0, 0, 129, 143, 3, 44, 22, 0, 130, 143, 3, 48, 24, 0, 131, 143, 3, 60, 30, 0, 132, 143, 3, 66, 33, 0, 133, 143, 3, 62, 31, 0, 134, 143, 3, 46, 23, 0, 135, 143, 3, 8, 4, 0, 136, 143, 3, 68, 34, 0, 137, 143, 3, 70, 35, 0, 138, 143, 3, 74, 37, 0, 139, 143, 3, 76, 38, 0, 140, 143, 3, 104, 52, 0, 141, 143, 3, 78, 39, 0, 142, 129, 1, 0, 0, 0, 142, 130, 1, 0, 0, 0, 142, 131, 1, 0, 0, 0, 142, 132, 1, 0, 0, 0, 142, 133, 1, 0, 0, 0, 142, 134, 1, 0, 0, 0, 142, 135, 1, 0, 0, 0, 142, 136, 1, 0, 0, 0, 142, 137, 1, 0, 0, 0, 142, 138, 1, 0, 0, 0, 142, 139, 1, 0, 0, 0, 142, 140, 1, 0, 0, 0, 142, 141, 1, 0, 0, 0, 143, 7, 1, 0, 0, 0, 144, 145, 5, 18, 0, 0, 145, 146, 3, 10, 5, 0, 146, 9, 1, 0, 0, 0, 147, 148, 6, 5, -1, 0, 148, 149, 5, 45, 0, 0, 149, 176, 3, 10, 5, 7, 150, 176, 3, 14, 7, 0, 151, 176, 3, 12, 6, 0, 152, 154, 3, 14, 7, 0, 153, 155, 5, 45, 0, 0, 154, 153, 1, 0, 0, 0, 154, 155, 1, 0, 0, 0, 155, 156, 1, 0, 0, 0, 156, 157, 5, 42, 0, 0, 157, 158, 5, 41, 0, 0, 158, 163, 3, 14, 7, 0, 159, 160, 5, 35, 0, 0, 160, 162, 3, 14, 7, 0, 161, 159, 1, 0, 0, 0, 162, 165, 1, 0, 0, 0, 163, 161, 1, 0, 0, 0, 163, 164, 1, 0, 0, 0, 164, 166, 1, 0, 0, 0, 165, 163, 1, 0, 0, 0, 166, 167, 5, 51, 0, 0, 167, 176, 1, 0, 0, 0, 168, 169, 3, 14, 7, 0, 169, 171, 5, 43, 0, 0, 170, 172, 5, 45, 0, 0, 171, 170, 1, 0, 0, 0, 171, 172, 1, 0, 0, 0, 172, 173, 1, 0, 0, 0, 173, 174, 5, 46, 0, 0, 174, 176, 1, 0, 0, 0, 175, 147, 1, 0, 0, 0, 175, 150, 1, 0, 0, 0, 175, 151, 1, 0, 0, 0, 175, 152, 1, 0, 0, 0, 175, 168, 1, 0, 0, 0, 176, 185, 1, 0, 0, 0, 177, 178, 10, 4, 0, 0, 178, 179, 5, 31, 0, 0, 179, 184, 3, 10, 5, 5, 180, 181, 10, 3, 0, 0, 181, 182, 5, 48, 0, 0, 182, 184, 3, 10, 5, 4, 183, 177, 1, 0, 0, 0, 183, 180, 1, 0, 0, 0, 184, 187, 1, 0, 0, 0, 185, 183, 1, 0, 0, 0, 185, 186, 1, 0, 0, 0, 186, 11, 1, 0, 0, 0, 187, 185, 1, 0, 0, 0, 188, 190, 3, 14, 7, 0, 189, 191, 5, 45, 0, 0, 190, 189, 1, 0, 0, 0, 190, 191, 1, 0, 0, 0, 
191, 192, 1, 0, 0, 0, 192, 193, 5, 44, 0, 0, 193, 194, 3, 92, 46, 0, 194, 203, 1, 0, 0, 0, 195, 197, 3, 14, 7, 0, 196, 198, 5, 45, 0, 0, 197, 196, 1, 0, 0, 0, 197, 198, 1, 0, 0, 0, 198, 199, 1, 0, 0, 0, 199, 200, 5, 50, 0, 0, 200, 201, 3, 92, 46, 0, 201, 203, 1, 0, 0, 0, 202, 188, 1, 0, 0, 0, 202, 195, 1, 0, 0, 0, 203, 13, 1, 0, 0, 0, 204, 210, 3, 16, 8, 0, 205, 206, 3, 16, 8, 0, 206, 207, 3, 94, 47, 0, 207, 208, 3, 16, 8, 0, 208, 210, 1, 0, 0, 0, 209, 204, 1, 0, 0, 0, 209, 205, 1, 0, 0, 0, 210, 15, 1, 0, 0, 0, 211, 212, 6, 8, -1, 0, 212, 216, 3, 18, 9, 0, 213, 214, 7, 0, 0, 0, 214, 216, 3, 16, 8, 3, 215, 211, 1, 0, 0, 0, 215, 213, 1, 0, 0, 0, 216, 225, 1, 0, 0, 0, 217, 218, 10, 2, 0, 0, 218, 219, 7, 1, 0, 0, 219, 224, 3, 16, 8, 3, 220, 221, 10, 1, 0, 0, 221, 222, 7, 0, 0, 0, 222, 224, 3, 16, 8, 2, 223, 217, 1, 0, 0, 0, 223, 220, 1, 0, 0, 0, 224, 227, 1, 0, 0, 0, 225, 223, 1, 0, 0, 0, 225, 226, 1, 0, 0, 0, 226, 17, 1, 0, 0, 0, 227, 225, 1, 0, 0, 0, 228, 229, 6, 9, -1, 0, 229, 237, 3, 58, 29, 0, 230, 237, 3, 50, 25, 0, 231, 237, 3, 20, 10, 0, 232, 233, 5, 41, 0, 0, 233, 234, 3, 10, 5, 0, 234, 235, 5, 51, 0, 0, 235, 237, 1, 0, 0, 0, 236, 228, 1, 0, 0, 0, 236, 230, 1, 0, 0, 0, 236, 231, 1, 0, 0, 0, 236, 232, 1, 0, 0, 0, 237, 243, 1, 0, 0, 0, 238, 239, 10, 1, 0, 0, 239, 240, 5, 34, 0, 0, 240, 242, 3, 22, 11, 0, 241, 238, 1, 0, 0, 0, 242, 245, 1, 0, 0, 0, 243, 241, 1, 0, 0, 0, 243, 244, 1, 0, 0, 0, 244, 19, 1, 0, 0, 0, 245, 243, 1, 0, 0, 0, 246, 247, 3, 54, 27, 0, 247, 257, 5, 41, 0, 0, 248, 258, 5, 62, 0, 0, 249, 254, 3, 10, 5, 0, 250, 251, 5, 35, 0, 0, 251, 253, 3, 10, 5, 0, 252, 250, 1, 0, 0, 0, 253, 256, 1, 0, 0, 0, 254, 252, 1, 0, 0, 0, 254, 255, 1, 0, 0, 0, 255, 258, 1, 0, 0, 0, 256, 254, 1, 0, 0, 0, 257, 248, 1, 0, 0, 0, 257, 249, 1, 0, 0, 0, 257, 258, 1, 0, 0, 0, 258, 259, 1, 0, 0, 0, 259, 260, 5, 51, 0, 0, 260, 21, 1, 0, 0, 0, 261, 262, 3, 54, 27, 0, 262, 23, 1, 0, 0, 0, 263, 264, 5, 14, 0, 0, 264, 265, 3, 26, 13, 0, 265, 25, 1, 0, 0, 0, 266, 271, 3, 28, 14, 0, 
267, 268, 5, 35, 0, 0, 268, 270, 3, 28, 14, 0, 269, 267, 1, 0, 0, 0, 270, 273, 1, 0, 0, 0, 271, 269, 1, 0, 0, 0, 271, 272, 1, 0, 0, 0, 272, 27, 1, 0, 0, 0, 273, 271, 1, 0, 0, 0, 274, 280, 3, 10, 5, 0, 275, 276, 3, 50, 25, 0, 276, 277, 5, 33, 0, 0, 277, 278, 3, 10, 5, 0, 278, 280, 1, 0, 0, 0, 279, 274, 1, 0, 0, 0, 279, 275, 1, 0, 0, 0, 280, 29, 1, 0, 0, 0, 281, 282, 5, 6, 0, 0, 282, 287, 3, 32, 16, 0, 283, 284, 5, 35, 0, 0, 284, 286, 3, 32, 16, 0, 285, 283, 1, 0, 0, 0, 286, 289, 1, 0, 0, 0, 287, 285, 1, 0, 0, 0, 287, 288, 1, 0, 0, 0, 288, 291, 1, 0, 0, 0, 289, 287, 1, 0, 0, 0, 290, 292, 3, 38, 19, 0, 291, 290, 1, 0, 0, 0, 291, 292, 1, 0, 0, 0, 292, 294, 1, 0, 0, 0, 293, 295, 3, 34, 17, 0, 294, 293, 1, 0, 0, 0, 294, 295, 1, 0, 0, 0, 295, 31, 1, 0, 0, 0, 296, 297, 7, 2, 0, 0, 297, 33, 1, 0, 0, 0, 298, 299, 5, 72, 0, 0, 299, 304, 3, 36, 18, 0, 300, 301, 5, 35, 0, 0, 301, 303, 3, 36, 18, 0, 302, 300, 1, 0, 0, 0, 303, 306, 1, 0, 0, 0, 304, 302, 1, 0, 0, 0, 304, 305, 1, 0, 0, 0, 305, 35, 1, 0, 0, 0, 306, 304, 1, 0, 0, 0, 307, 308, 3, 92, 46, 0, 308, 309, 5, 33, 0, 0, 309, 310, 3, 92, 46, 0, 310, 37, 1, 0, 0, 0, 311, 314, 3, 40, 20, 0, 312, 314, 3, 42, 21, 0, 313, 311, 1, 0, 0, 0, 313, 312, 1, 0, 0, 0, 314, 39, 1, 0, 0, 0, 315, 316, 5, 73, 0, 0, 316, 321, 3, 32, 16, 0, 317, 318, 5, 35, 0, 0, 318, 320, 3, 32, 16, 0, 319, 317, 1, 0, 0, 0, 320, 323, 1, 0, 0, 0, 321, 319, 1, 0, 0, 0, 321, 322, 1, 0, 0, 0, 322, 41, 1, 0, 0, 0, 323, 321, 1, 0, 0, 0, 324, 325, 5, 65, 0, 0, 325, 326, 3, 40, 20, 0, 326, 327, 5, 66, 0, 0, 327, 43, 1, 0, 0, 0, 328, 329, 5, 4, 0, 0, 329, 330, 3, 26, 13, 0, 330, 45, 1, 0, 0, 0, 331, 333, 5, 17, 0, 0, 332, 334, 3, 26, 13, 0, 333, 332, 1, 0, 0, 0, 333, 334, 1, 0, 0, 0, 334, 337, 1, 0, 0, 0, 335, 336, 5, 30, 0, 0, 336, 338, 3, 26, 13, 0, 337, 335, 1, 0, 0, 0, 337, 338, 1, 0, 0, 0, 338, 47, 1, 0, 0, 0, 339, 340, 5, 8, 0, 0, 340, 343, 3, 26, 13, 0, 341, 342, 5, 30, 0, 0, 342, 344, 3, 26, 13, 0, 343, 341, 1, 0, 0, 0, 343, 344, 1, 0, 0, 0, 344, 49, 1, 0, 0, 
0, 345, 350, 3, 54, 27, 0, 346, 347, 5, 37, 0, 0, 347, 349, 3, 54, 27, 0, 348, 346, 1, 0, 0, 0, 349, 352, 1, 0, 0, 0, 350, 348, 1, 0, 0, 0, 350, 351, 1, 0, 0, 0, 351, 51, 1, 0, 0, 0, 352, 350, 1, 0, 0, 0, 353, 358, 3, 56, 28, 0, 354, 355, 5, 37, 0, 0, 355, 357, 3, 56, 28, 0, 356, 354, 1, 0, 0, 0, 357, 360, 1, 0, 0, 0, 358, 356, 1, 0, 0, 0, 358, 359, 1, 0, 0, 0, 359, 53, 1, 0, 0, 0, 360, 358, 1, 0, 0, 0, 361, 362, 7, 3, 0, 0, 362, 55, 1, 0, 0, 0, 363, 364, 5, 78, 0, 0, 364, 57, 1, 0, 0, 0, 365, 408, 5, 46, 0, 0, 366, 367, 3, 90, 45, 0, 367, 368, 5, 67, 0, 0, 368, 408, 1, 0, 0, 0, 369, 408, 3, 88, 44, 0, 370, 408, 3, 90, 45, 0, 371, 408, 3, 84, 42, 0, 372, 408, 5, 49, 0, 0, 373, 408, 3, 92, 46, 0, 374, 375, 5, 65, 0, 0, 375, 380, 3, 86, 43, 0, 376, 377, 5, 35, 0, 0, 377, 379, 3, 86, 43, 0, 378, 376, 1, 0, 0, 0, 379, 382, 1, 0, 0, 0, 380, 378, 1, 0, 0, 0, 380, 381, 1, 0, 0, 0, 381, 383, 1, 0, 0, 0, 382, 380, 1, 0, 0, 0, 383, 384, 5, 66, 0, 0, 384, 408, 1, 0, 0, 0, 385, 386, 5, 65, 0, 0, 386, 391, 3, 84, 42, 0, 387, 388, 5, 35, 0, 0, 388, 390, 3, 84, 42, 0, 389, 387, 1, 0, 0, 0, 390, 393, 1, 0, 0, 0, 391, 389, 1, 0, 0, 0, 391, 392, 1, 0, 0, 0, 392, 394, 1, 0, 0, 0, 393, 391, 1, 0, 0, 0, 394, 395, 5, 66, 0, 0, 395, 408, 1, 0, 0, 0, 396, 397, 5, 65, 0, 0, 397, 402, 3, 92, 46, 0, 398, 399, 5, 35, 0, 0, 399, 401, 3, 92, 46, 0, 400, 398, 1, 0, 0, 0, 401, 404, 1, 0, 0, 0, 402, 400, 1, 0, 0, 0, 402, 403, 1, 0, 0, 0, 403, 405, 1, 0, 0, 0, 404, 402, 1, 0, 0, 0, 405, 406, 5, 66, 0, 0, 406, 408, 1, 0, 0, 0, 407, 365, 1, 0, 0, 0, 407, 366, 1, 0, 0, 0, 407, 369, 1, 0, 0, 0, 407, 370, 1, 0, 0, 0, 407, 371, 1, 0, 0, 0, 407, 372, 1, 0, 0, 0, 407, 373, 1, 0, 0, 0, 407, 374, 1, 0, 0, 0, 407, 385, 1, 0, 0, 0, 407, 396, 1, 0, 0, 0, 408, 59, 1, 0, 0, 0, 409, 410, 5, 10, 0, 0, 410, 411, 5, 28, 0, 0, 411, 61, 1, 0, 0, 0, 412, 413, 5, 16, 0, 0, 413, 418, 3, 64, 32, 0, 414, 415, 5, 35, 0, 0, 415, 417, 3, 64, 32, 0, 416, 414, 1, 0, 0, 0, 417, 420, 1, 0, 0, 0, 418, 416, 1, 0, 0, 0, 418, 419, 1, 
0, 0, 0, 419, 63, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 421, 423, 3, 10, 5, 0, 422, 424, 7, 4, 0, 0, 423, 422, 1, 0, 0, 0, 423, 424, 1, 0, 0, 0, 424, 427, 1, 0, 0, 0, 425, 426, 5, 47, 0, 0, 426, 428, 7, 5, 0, 0, 427, 425, 1, 0, 0, 0, 427, 428, 1, 0, 0, 0, 428, 65, 1, 0, 0, 0, 429, 430, 5, 9, 0, 0, 430, 435, 3, 52, 26, 0, 431, 432, 5, 35, 0, 0, 432, 434, 3, 52, 26, 0, 433, 431, 1, 0, 0, 0, 434, 437, 1, 0, 0, 0, 435, 433, 1, 0, 0, 0, 435, 436, 1, 0, 0, 0, 436, 67, 1, 0, 0, 0, 437, 435, 1, 0, 0, 0, 438, 439, 5, 2, 0, 0, 439, 444, 3, 52, 26, 0, 440, 441, 5, 35, 0, 0, 441, 443, 3, 52, 26, 0, 442, 440, 1, 0, 0, 0, 443, 446, 1, 0, 0, 0, 444, 442, 1, 0, 0, 0, 444, 445, 1, 0, 0, 0, 445, 69, 1, 0, 0, 0, 446, 444, 1, 0, 0, 0, 447, 448, 5, 13, 0, 0, 448, 453, 3, 72, 36, 0, 449, 450, 5, 35, 0, 0, 450, 452, 3, 72, 36, 0, 451, 449, 1, 0, 0, 0, 452, 455, 1, 0, 0, 0, 453, 451, 1, 0, 0, 0, 453, 454, 1, 0, 0, 0, 454, 71, 1, 0, 0, 0, 455, 453, 1, 0, 0, 0, 456, 457, 3, 52, 26, 0, 457, 458, 5, 82, 0, 0, 458, 459, 3, 52, 26, 0, 459, 73, 1, 0, 0, 0, 460, 461, 5, 1, 0, 0, 461, 462, 3, 18, 9, 0, 462, 464, 3, 92, 46, 0, 463, 465, 3, 80, 40, 0, 464, 463, 1, 0, 0, 0, 464, 465, 1, 0, 0, 0, 465, 75, 1, 0, 0, 0, 466, 467, 5, 7, 0, 0, 467, 468, 3, 18, 9, 0, 468, 469, 3, 92, 46, 0, 469, 77, 1, 0, 0, 0, 470, 471, 5, 12, 0, 0, 471, 472, 3, 50, 25, 0, 472, 79, 1, 0, 0, 0, 473, 478, 3, 82, 41, 0, 474, 475, 5, 35, 0, 0, 475, 477, 3, 82, 41, 0, 476, 474, 1, 0, 0, 0, 477, 480, 1, 0, 0, 0, 478, 476, 1, 0, 0, 0, 478, 479, 1, 0, 0, 0, 479, 81, 1, 0, 0, 0, 480, 478, 1, 0, 0, 0, 481, 482, 3, 54, 27, 0, 482, 483, 5, 33, 0, 0, 483, 484, 3, 58, 29, 0, 484, 83, 1, 0, 0, 0, 485, 486, 7, 6, 0, 0, 486, 85, 1, 0, 0, 0, 487, 490, 3, 88, 44, 0, 488, 490, 3, 90, 45, 0, 489, 487, 1, 0, 0, 0, 489, 488, 1, 0, 0, 0, 490, 87, 1, 0, 0, 0, 491, 493, 7, 0, 0, 0, 492, 491, 1, 0, 0, 0, 492, 493, 1, 0, 0, 0, 493, 494, 1, 0, 0, 0, 494, 495, 5, 29, 0, 0, 495, 89, 1, 0, 0, 0, 496, 498, 7, 0, 0, 0, 497, 496, 1, 0, 0, 0, 497, 498, 1, 0, 0, 
0, 498, 499, 1, 0, 0, 0, 499, 500, 5, 28, 0, 0, 500, 91, 1, 0, 0, 0, 501, 502, 5, 27, 0, 0, 502, 93, 1, 0, 0, 0, 503, 504, 7, 7, 0, 0, 504, 95, 1, 0, 0, 0, 505, 506, 5, 5, 0, 0, 506, 507, 3, 98, 49, 0, 507, 97, 1, 0, 0, 0, 508, 509, 5, 65, 0, 0, 509, 510, 3, 2, 1, 0, 510, 511, 5, 66, 0, 0, 511, 99, 1, 0, 0, 0, 512, 513, 5, 15, 0, 0, 513, 514, 5, 98, 0, 0, 514, 101, 1, 0, 0, 0, 515, 516, 5, 11, 0, 0, 516, 517, 5, 102, 0, 0, 517, 103, 1, 0, 0, 0, 518, 519, 5, 3, 0, 0, 519, 522, 5, 88, 0, 0, 520, 521, 5, 86, 0, 0, 521, 523, 3, 52, 26, 0, 522, 520, 1, 0, 0, 0, 522, 523, 1, 0, 0, 0, 523, 533, 1, 0, 0, 0, 524, 525, 5, 87, 0, 0, 525, 530, 3, 106, 53, 0, 526, 527, 5, 35, 0, 0, 527, 529, 3, 106, 53, 0, 528, 526, 1, 0, 0, 0, 529, 532, 1, 0, 0, 0, 530, 528, 1, 0, 0, 0, 530, 531, 1, 0, 0, 0, 531, 534, 1, 0, 0, 0, 532, 530, 1, 0, 0, 0, 533, 524, 1, 0, 0, 0, 533, 534, 1, 0, 0, 0, 534, 105, 1, 0, 0, 0, 535, 536, 3, 52, 26, 0, 536, 537, 5, 33, 0, 0, 537, 539, 1, 0, 0, 0, 538, 535, 1, 0, 0, 0, 538, 539, 1, 0, 0, 0, 539, 540, 1, 0, 0, 0, 540, 541, 3, 52, 26, 0, 541, 107, 1, 0, 0, 0, 52, 119, 127, 142, 154, 163, 171, 175, 183, 185, 190, 197, 202, 209, 215, 223, 225, 236, 243, 254, 257, 271, 279, 287, 291, 294, 304, 313, 321, 333, 337, 343, 350, 358, 380, 391, 402, 407, 418, 423, 427, 435, 444, 453, 464, 478, 489, 492, 497, 522, 530, 533, 538] \ No newline at end of file +[4, 1, 117, 562, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 
44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 120, 8, 1, 10, 1, 12, 1, 123, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 131, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 146, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 158, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 165, 8, 5, 10, 5, 12, 5, 168, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 175, 8, 5, 1, 5, 1, 5, 3, 5, 179, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 187, 8, 5, 10, 5, 12, 5, 190, 9, 5, 1, 6, 1, 6, 3, 6, 194, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 201, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 206, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 213, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 219, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 227, 8, 8, 10, 8, 12, 8, 230, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 240, 8, 9, 1, 9, 1, 9, 1, 9, 5, 9, 245, 8, 9, 10, 9, 12, 9, 248, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 256, 8, 10, 10, 10, 12, 10, 259, 9, 10, 3, 10, 261, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 5, 13, 273, 8, 13, 10, 13, 12, 13, 276, 9, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 3, 14, 283, 8, 14, 1, 15, 1, 15, 1, 15, 1, 15, 5, 15, 289, 8, 15, 10, 15, 12, 15, 292, 9, 15, 1, 15, 3, 15, 295, 8, 15, 1, 15, 3, 15, 298, 8, 15, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 306, 8, 17, 10, 17, 12, 17, 309, 9, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 3, 19, 317, 8, 19, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 323, 8, 20, 10, 20, 12, 20, 326, 9, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 336, 8, 22, 10, 22, 12, 22, 339, 9, 22, 1, 22, 3, 22, 342, 8, 22, 1, 22, 1, 22, 3, 22, 346, 8, 22, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 3, 24, 353, 8, 24, 1, 24, 1, 24, 3, 24, 357, 8, 24, 1, 25, 1, 25, 1, 25, 1, 
25, 3, 25, 363, 8, 25, 1, 26, 1, 26, 1, 26, 5, 26, 368, 8, 26, 10, 26, 12, 26, 371, 9, 26, 1, 27, 1, 27, 1, 27, 5, 27, 376, 8, 27, 10, 27, 12, 27, 379, 9, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 398, 8, 30, 10, 30, 12, 30, 401, 9, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 409, 8, 30, 10, 30, 12, 30, 412, 9, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 420, 8, 30, 10, 30, 12, 30, 423, 9, 30, 1, 30, 1, 30, 3, 30, 427, 8, 30, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 436, 8, 32, 10, 32, 12, 32, 439, 9, 32, 1, 33, 1, 33, 3, 33, 443, 8, 33, 1, 33, 1, 33, 3, 33, 447, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 453, 8, 34, 10, 34, 12, 34, 456, 9, 34, 1, 35, 1, 35, 1, 35, 1, 35, 5, 35, 462, 8, 35, 10, 35, 12, 35, 465, 9, 35, 1, 36, 1, 36, 1, 36, 1, 36, 5, 36, 471, 8, 36, 10, 36, 12, 36, 474, 9, 36, 1, 37, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 3, 38, 484, 8, 38, 1, 39, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 5, 41, 496, 8, 41, 10, 41, 12, 41, 499, 9, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 44, 1, 44, 3, 44, 509, 8, 44, 1, 45, 3, 45, 512, 8, 45, 1, 45, 1, 45, 1, 46, 3, 46, 517, 8, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 3, 53, 542, 8, 53, 1, 53, 1, 53, 1, 53, 1, 53, 5, 53, 548, 8, 53, 10, 53, 12, 53, 551, 9, 53, 3, 53, 553, 8, 53, 1, 54, 1, 54, 1, 54, 3, 54, 558, 8, 54, 1, 54, 1, 54, 1, 54, 0, 4, 2, 10, 16, 18, 55, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 0, 7, 1, 0, 62, 63, 1, 0, 64, 66, 1, 0, 69, 70, 2, 0, 34, 34, 38, 38, 1, 0, 41, 42, 2, 0, 40, 40, 54, 54, 2, 0, 55, 55, 57, 61, 590, 0, 110, 1, 0, 0, 0, 2, 113, 1, 
0, 0, 0, 4, 130, 1, 0, 0, 0, 6, 145, 1, 0, 0, 0, 8, 147, 1, 0, 0, 0, 10, 178, 1, 0, 0, 0, 12, 205, 1, 0, 0, 0, 14, 212, 1, 0, 0, 0, 16, 218, 1, 0, 0, 0, 18, 239, 1, 0, 0, 0, 20, 249, 1, 0, 0, 0, 22, 264, 1, 0, 0, 0, 24, 266, 1, 0, 0, 0, 26, 269, 1, 0, 0, 0, 28, 282, 1, 0, 0, 0, 30, 284, 1, 0, 0, 0, 32, 299, 1, 0, 0, 0, 34, 301, 1, 0, 0, 0, 36, 310, 1, 0, 0, 0, 38, 316, 1, 0, 0, 0, 40, 318, 1, 0, 0, 0, 42, 327, 1, 0, 0, 0, 44, 331, 1, 0, 0, 0, 46, 347, 1, 0, 0, 0, 48, 350, 1, 0, 0, 0, 50, 358, 1, 0, 0, 0, 52, 364, 1, 0, 0, 0, 54, 372, 1, 0, 0, 0, 56, 380, 1, 0, 0, 0, 58, 382, 1, 0, 0, 0, 60, 426, 1, 0, 0, 0, 62, 428, 1, 0, 0, 0, 64, 431, 1, 0, 0, 0, 66, 440, 1, 0, 0, 0, 68, 448, 1, 0, 0, 0, 70, 457, 1, 0, 0, 0, 72, 466, 1, 0, 0, 0, 74, 475, 1, 0, 0, 0, 76, 479, 1, 0, 0, 0, 78, 485, 1, 0, 0, 0, 80, 489, 1, 0, 0, 0, 82, 492, 1, 0, 0, 0, 84, 500, 1, 0, 0, 0, 86, 504, 1, 0, 0, 0, 88, 508, 1, 0, 0, 0, 90, 511, 1, 0, 0, 0, 92, 516, 1, 0, 0, 0, 94, 520, 1, 0, 0, 0, 96, 522, 1, 0, 0, 0, 98, 524, 1, 0, 0, 0, 100, 527, 1, 0, 0, 0, 102, 531, 1, 0, 0, 0, 104, 534, 1, 0, 0, 0, 106, 537, 1, 0, 0, 0, 108, 557, 1, 0, 0, 0, 110, 111, 3, 2, 1, 0, 111, 112, 5, 0, 0, 1, 112, 1, 1, 0, 0, 0, 113, 114, 6, 1, -1, 0, 114, 115, 3, 4, 2, 0, 115, 121, 1, 0, 0, 0, 116, 117, 10, 1, 0, 0, 117, 118, 5, 28, 0, 0, 118, 120, 3, 6, 3, 0, 119, 116, 1, 0, 0, 0, 120, 123, 1, 0, 0, 0, 121, 119, 1, 0, 0, 0, 121, 122, 1, 0, 0, 0, 122, 3, 1, 0, 0, 0, 123, 121, 1, 0, 0, 0, 124, 131, 3, 98, 49, 0, 125, 131, 3, 30, 15, 0, 126, 131, 3, 24, 12, 0, 127, 131, 3, 44, 22, 0, 128, 131, 3, 102, 51, 0, 129, 131, 3, 104, 52, 0, 130, 124, 1, 0, 0, 0, 130, 125, 1, 0, 0, 0, 130, 126, 1, 0, 0, 0, 130, 127, 1, 0, 0, 0, 130, 128, 1, 0, 0, 0, 130, 129, 1, 0, 0, 0, 131, 5, 1, 0, 0, 0, 132, 146, 3, 46, 23, 0, 133, 146, 3, 50, 25, 0, 134, 146, 3, 62, 31, 0, 135, 146, 3, 68, 34, 0, 136, 146, 3, 64, 32, 0, 137, 146, 3, 48, 24, 0, 138, 146, 3, 8, 4, 0, 139, 146, 3, 70, 35, 0, 140, 146, 3, 72, 36, 0, 141, 146, 3, 76, 38, 0, 142, 146, 
3, 78, 39, 0, 143, 146, 3, 106, 53, 0, 144, 146, 3, 80, 40, 0, 145, 132, 1, 0, 0, 0, 145, 133, 1, 0, 0, 0, 145, 134, 1, 0, 0, 0, 145, 135, 1, 0, 0, 0, 145, 136, 1, 0, 0, 0, 145, 137, 1, 0, 0, 0, 145, 138, 1, 0, 0, 0, 145, 139, 1, 0, 0, 0, 145, 140, 1, 0, 0, 0, 145, 141, 1, 0, 0, 0, 145, 142, 1, 0, 0, 0, 145, 143, 1, 0, 0, 0, 145, 144, 1, 0, 0, 0, 146, 7, 1, 0, 0, 0, 147, 148, 5, 19, 0, 0, 148, 149, 3, 10, 5, 0, 149, 9, 1, 0, 0, 0, 150, 151, 6, 5, -1, 0, 151, 152, 5, 47, 0, 0, 152, 179, 3, 10, 5, 7, 153, 179, 3, 14, 7, 0, 154, 179, 3, 12, 6, 0, 155, 157, 3, 14, 7, 0, 156, 158, 5, 47, 0, 0, 157, 156, 1, 0, 0, 0, 157, 158, 1, 0, 0, 0, 158, 159, 1, 0, 0, 0, 159, 160, 5, 44, 0, 0, 160, 161, 5, 43, 0, 0, 161, 166, 3, 14, 7, 0, 162, 163, 5, 37, 0, 0, 163, 165, 3, 14, 7, 0, 164, 162, 1, 0, 0, 0, 165, 168, 1, 0, 0, 0, 166, 164, 1, 0, 0, 0, 166, 167, 1, 0, 0, 0, 167, 169, 1, 0, 0, 0, 168, 166, 1, 0, 0, 0, 169, 170, 5, 53, 0, 0, 170, 179, 1, 0, 0, 0, 171, 172, 3, 14, 7, 0, 172, 174, 5, 45, 0, 0, 173, 175, 5, 47, 0, 0, 174, 173, 1, 0, 0, 0, 174, 175, 1, 0, 0, 0, 175, 176, 1, 0, 0, 0, 176, 177, 5, 48, 0, 0, 177, 179, 1, 0, 0, 0, 178, 150, 1, 0, 0, 0, 178, 153, 1, 0, 0, 0, 178, 154, 1, 0, 0, 0, 178, 155, 1, 0, 0, 0, 178, 171, 1, 0, 0, 0, 179, 188, 1, 0, 0, 0, 180, 181, 10, 4, 0, 0, 181, 182, 5, 33, 0, 0, 182, 187, 3, 10, 5, 5, 183, 184, 10, 3, 0, 0, 184, 185, 5, 50, 0, 0, 185, 187, 3, 10, 5, 4, 186, 180, 1, 0, 0, 0, 186, 183, 1, 0, 0, 0, 187, 190, 1, 0, 0, 0, 188, 186, 1, 0, 0, 0, 188, 189, 1, 0, 0, 0, 189, 11, 1, 0, 0, 0, 190, 188, 1, 0, 0, 0, 191, 193, 3, 14, 7, 0, 192, 194, 5, 47, 0, 0, 193, 192, 1, 0, 0, 0, 193, 194, 1, 0, 0, 0, 194, 195, 1, 0, 0, 0, 195, 196, 5, 46, 0, 0, 196, 197, 3, 94, 47, 0, 197, 206, 1, 0, 0, 0, 198, 200, 3, 14, 7, 0, 199, 201, 5, 47, 0, 0, 200, 199, 1, 0, 0, 0, 200, 201, 1, 0, 0, 0, 201, 202, 1, 0, 0, 0, 202, 203, 5, 52, 0, 0, 203, 204, 3, 94, 47, 0, 204, 206, 1, 0, 0, 0, 205, 191, 1, 0, 0, 0, 205, 198, 1, 0, 0, 0, 206, 13, 1, 0, 0, 0, 207, 213, 3, 
16, 8, 0, 208, 209, 3, 16, 8, 0, 209, 210, 3, 96, 48, 0, 210, 211, 3, 16, 8, 0, 211, 213, 1, 0, 0, 0, 212, 207, 1, 0, 0, 0, 212, 208, 1, 0, 0, 0, 213, 15, 1, 0, 0, 0, 214, 215, 6, 8, -1, 0, 215, 219, 3, 18, 9, 0, 216, 217, 7, 0, 0, 0, 217, 219, 3, 16, 8, 3, 218, 214, 1, 0, 0, 0, 218, 216, 1, 0, 0, 0, 219, 228, 1, 0, 0, 0, 220, 221, 10, 2, 0, 0, 221, 222, 7, 1, 0, 0, 222, 227, 3, 16, 8, 3, 223, 224, 10, 1, 0, 0, 224, 225, 7, 0, 0, 0, 225, 227, 3, 16, 8, 2, 226, 220, 1, 0, 0, 0, 226, 223, 1, 0, 0, 0, 227, 230, 1, 0, 0, 0, 228, 226, 1, 0, 0, 0, 228, 229, 1, 0, 0, 0, 229, 17, 1, 0, 0, 0, 230, 228, 1, 0, 0, 0, 231, 232, 6, 9, -1, 0, 232, 240, 3, 60, 30, 0, 233, 240, 3, 52, 26, 0, 234, 240, 3, 20, 10, 0, 235, 236, 5, 43, 0, 0, 236, 237, 3, 10, 5, 0, 237, 238, 5, 53, 0, 0, 238, 240, 1, 0, 0, 0, 239, 231, 1, 0, 0, 0, 239, 233, 1, 0, 0, 0, 239, 234, 1, 0, 0, 0, 239, 235, 1, 0, 0, 0, 240, 246, 1, 0, 0, 0, 241, 242, 10, 1, 0, 0, 242, 243, 5, 36, 0, 0, 243, 245, 3, 22, 11, 0, 244, 241, 1, 0, 0, 0, 245, 248, 1, 0, 0, 0, 246, 244, 1, 0, 0, 0, 246, 247, 1, 0, 0, 0, 247, 19, 1, 0, 0, 0, 248, 246, 1, 0, 0, 0, 249, 250, 3, 56, 28, 0, 250, 260, 5, 43, 0, 0, 251, 261, 5, 64, 0, 0, 252, 257, 3, 10, 5, 0, 253, 254, 5, 37, 0, 0, 254, 256, 3, 10, 5, 0, 255, 253, 1, 0, 0, 0, 256, 259, 1, 0, 0, 0, 257, 255, 1, 0, 0, 0, 257, 258, 1, 0, 0, 0, 258, 261, 1, 0, 0, 0, 259, 257, 1, 0, 0, 0, 260, 251, 1, 0, 0, 0, 260, 252, 1, 0, 0, 0, 260, 261, 1, 0, 0, 0, 261, 262, 1, 0, 0, 0, 262, 263, 5, 53, 0, 0, 263, 21, 1, 0, 0, 0, 264, 265, 3, 56, 28, 0, 265, 23, 1, 0, 0, 0, 266, 267, 5, 15, 0, 0, 267, 268, 3, 26, 13, 0, 268, 25, 1, 0, 0, 0, 269, 274, 3, 28, 14, 0, 270, 271, 5, 37, 0, 0, 271, 273, 3, 28, 14, 0, 272, 270, 1, 0, 0, 0, 273, 276, 1, 0, 0, 0, 274, 272, 1, 0, 0, 0, 274, 275, 1, 0, 0, 0, 275, 27, 1, 0, 0, 0, 276, 274, 1, 0, 0, 0, 277, 283, 3, 10, 5, 0, 278, 279, 3, 52, 26, 0, 279, 280, 5, 35, 0, 0, 280, 281, 3, 10, 5, 0, 281, 283, 1, 0, 0, 0, 282, 277, 1, 0, 0, 0, 282, 278, 1, 0, 0, 0, 283, 29, 1, 
0, 0, 0, 284, 285, 5, 6, 0, 0, 285, 290, 3, 32, 16, 0, 286, 287, 5, 37, 0, 0, 287, 289, 3, 32, 16, 0, 288, 286, 1, 0, 0, 0, 289, 292, 1, 0, 0, 0, 290, 288, 1, 0, 0, 0, 290, 291, 1, 0, 0, 0, 291, 294, 1, 0, 0, 0, 292, 290, 1, 0, 0, 0, 293, 295, 3, 38, 19, 0, 294, 293, 1, 0, 0, 0, 294, 295, 1, 0, 0, 0, 295, 297, 1, 0, 0, 0, 296, 298, 3, 34, 17, 0, 297, 296, 1, 0, 0, 0, 297, 298, 1, 0, 0, 0, 298, 31, 1, 0, 0, 0, 299, 300, 5, 24, 0, 0, 300, 33, 1, 0, 0, 0, 301, 302, 5, 74, 0, 0, 302, 307, 3, 36, 18, 0, 303, 304, 5, 37, 0, 0, 304, 306, 3, 36, 18, 0, 305, 303, 1, 0, 0, 0, 306, 309, 1, 0, 0, 0, 307, 305, 1, 0, 0, 0, 307, 308, 1, 0, 0, 0, 308, 35, 1, 0, 0, 0, 309, 307, 1, 0, 0, 0, 310, 311, 3, 94, 47, 0, 311, 312, 5, 35, 0, 0, 312, 313, 3, 94, 47, 0, 313, 37, 1, 0, 0, 0, 314, 317, 3, 40, 20, 0, 315, 317, 3, 42, 21, 0, 316, 314, 1, 0, 0, 0, 316, 315, 1, 0, 0, 0, 317, 39, 1, 0, 0, 0, 318, 319, 5, 75, 0, 0, 319, 324, 3, 32, 16, 0, 320, 321, 5, 37, 0, 0, 321, 323, 3, 32, 16, 0, 322, 320, 1, 0, 0, 0, 323, 326, 1, 0, 0, 0, 324, 322, 1, 0, 0, 0, 324, 325, 1, 0, 0, 0, 325, 41, 1, 0, 0, 0, 326, 324, 1, 0, 0, 0, 327, 328, 5, 67, 0, 0, 328, 329, 3, 40, 20, 0, 329, 330, 5, 68, 0, 0, 330, 43, 1, 0, 0, 0, 331, 332, 5, 12, 0, 0, 332, 337, 3, 32, 16, 0, 333, 334, 5, 37, 0, 0, 334, 336, 3, 32, 16, 0, 335, 333, 1, 0, 0, 0, 336, 339, 1, 0, 0, 0, 337, 335, 1, 0, 0, 0, 337, 338, 1, 0, 0, 0, 338, 341, 1, 0, 0, 0, 339, 337, 1, 0, 0, 0, 340, 342, 3, 26, 13, 0, 341, 340, 1, 0, 0, 0, 341, 342, 1, 0, 0, 0, 342, 345, 1, 0, 0, 0, 343, 344, 5, 32, 0, 0, 344, 346, 3, 26, 13, 0, 345, 343, 1, 0, 0, 0, 345, 346, 1, 0, 0, 0, 346, 45, 1, 0, 0, 0, 347, 348, 5, 4, 0, 0, 348, 349, 3, 26, 13, 0, 349, 47, 1, 0, 0, 0, 350, 352, 5, 18, 0, 0, 351, 353, 3, 26, 13, 0, 352, 351, 1, 0, 0, 0, 352, 353, 1, 0, 0, 0, 353, 356, 1, 0, 0, 0, 354, 355, 5, 32, 0, 0, 355, 357, 3, 26, 13, 0, 356, 354, 1, 0, 0, 0, 356, 357, 1, 0, 0, 0, 357, 49, 1, 0, 0, 0, 358, 359, 5, 8, 0, 0, 359, 362, 3, 26, 13, 0, 360, 361, 5, 32, 0, 0, 361, 
363, 3, 26, 13, 0, 362, 360, 1, 0, 0, 0, 362, 363, 1, 0, 0, 0, 363, 51, 1, 0, 0, 0, 364, 369, 3, 56, 28, 0, 365, 366, 5, 39, 0, 0, 366, 368, 3, 56, 28, 0, 367, 365, 1, 0, 0, 0, 368, 371, 1, 0, 0, 0, 369, 367, 1, 0, 0, 0, 369, 370, 1, 0, 0, 0, 370, 53, 1, 0, 0, 0, 371, 369, 1, 0, 0, 0, 372, 377, 3, 58, 29, 0, 373, 374, 5, 39, 0, 0, 374, 376, 3, 58, 29, 0, 375, 373, 1, 0, 0, 0, 376, 379, 1, 0, 0, 0, 377, 375, 1, 0, 0, 0, 377, 378, 1, 0, 0, 0, 378, 55, 1, 0, 0, 0, 379, 377, 1, 0, 0, 0, 380, 381, 7, 2, 0, 0, 381, 57, 1, 0, 0, 0, 382, 383, 5, 79, 0, 0, 383, 59, 1, 0, 0, 0, 384, 427, 5, 48, 0, 0, 385, 386, 3, 92, 46, 0, 386, 387, 5, 69, 0, 0, 387, 427, 1, 0, 0, 0, 388, 427, 3, 90, 45, 0, 389, 427, 3, 92, 46, 0, 390, 427, 3, 86, 43, 0, 391, 427, 5, 51, 0, 0, 392, 427, 3, 94, 47, 0, 393, 394, 5, 67, 0, 0, 394, 399, 3, 88, 44, 0, 395, 396, 5, 37, 0, 0, 396, 398, 3, 88, 44, 0, 397, 395, 1, 0, 0, 0, 398, 401, 1, 0, 0, 0, 399, 397, 1, 0, 0, 0, 399, 400, 1, 0, 0, 0, 400, 402, 1, 0, 0, 0, 401, 399, 1, 0, 0, 0, 402, 403, 5, 68, 0, 0, 403, 427, 1, 0, 0, 0, 404, 405, 5, 67, 0, 0, 405, 410, 3, 86, 43, 0, 406, 407, 5, 37, 0, 0, 407, 409, 3, 86, 43, 0, 408, 406, 1, 0, 0, 0, 409, 412, 1, 0, 0, 0, 410, 408, 1, 0, 0, 0, 410, 411, 1, 0, 0, 0, 411, 413, 1, 0, 0, 0, 412, 410, 1, 0, 0, 0, 413, 414, 5, 68, 0, 0, 414, 427, 1, 0, 0, 0, 415, 416, 5, 67, 0, 0, 416, 421, 3, 94, 47, 0, 417, 418, 5, 37, 0, 0, 418, 420, 3, 94, 47, 0, 419, 417, 1, 0, 0, 0, 420, 423, 1, 0, 0, 0, 421, 419, 1, 0, 0, 0, 421, 422, 1, 0, 0, 0, 422, 424, 1, 0, 0, 0, 423, 421, 1, 0, 0, 0, 424, 425, 5, 68, 0, 0, 425, 427, 1, 0, 0, 0, 426, 384, 1, 0, 0, 0, 426, 385, 1, 0, 0, 0, 426, 388, 1, 0, 0, 0, 426, 389, 1, 0, 0, 0, 426, 390, 1, 0, 0, 0, 426, 391, 1, 0, 0, 0, 426, 392, 1, 0, 0, 0, 426, 393, 1, 0, 0, 0, 426, 404, 1, 0, 0, 0, 426, 415, 1, 0, 0, 0, 427, 61, 1, 0, 0, 0, 428, 429, 5, 10, 0, 0, 429, 430, 5, 30, 0, 0, 430, 63, 1, 0, 0, 0, 431, 432, 5, 17, 0, 0, 432, 437, 3, 66, 33, 0, 433, 434, 5, 37, 0, 0, 434, 436, 3, 66, 33, 
0, 435, 433, 1, 0, 0, 0, 436, 439, 1, 0, 0, 0, 437, 435, 1, 0, 0, 0, 437, 438, 1, 0, 0, 0, 438, 65, 1, 0, 0, 0, 439, 437, 1, 0, 0, 0, 440, 442, 3, 10, 5, 0, 441, 443, 7, 3, 0, 0, 442, 441, 1, 0, 0, 0, 442, 443, 1, 0, 0, 0, 443, 446, 1, 0, 0, 0, 444, 445, 5, 49, 0, 0, 445, 447, 7, 4, 0, 0, 446, 444, 1, 0, 0, 0, 446, 447, 1, 0, 0, 0, 447, 67, 1, 0, 0, 0, 448, 449, 5, 9, 0, 0, 449, 454, 3, 54, 27, 0, 450, 451, 5, 37, 0, 0, 451, 453, 3, 54, 27, 0, 452, 450, 1, 0, 0, 0, 453, 456, 1, 0, 0, 0, 454, 452, 1, 0, 0, 0, 454, 455, 1, 0, 0, 0, 455, 69, 1, 0, 0, 0, 456, 454, 1, 0, 0, 0, 457, 458, 5, 2, 0, 0, 458, 463, 3, 54, 27, 0, 459, 460, 5, 37, 0, 0, 460, 462, 3, 54, 27, 0, 461, 459, 1, 0, 0, 0, 462, 465, 1, 0, 0, 0, 463, 461, 1, 0, 0, 0, 463, 464, 1, 0, 0, 0, 464, 71, 1, 0, 0, 0, 465, 463, 1, 0, 0, 0, 466, 467, 5, 14, 0, 0, 467, 472, 3, 74, 37, 0, 468, 469, 5, 37, 0, 0, 469, 471, 3, 74, 37, 0, 470, 468, 1, 0, 0, 0, 471, 474, 1, 0, 0, 0, 472, 470, 1, 0, 0, 0, 472, 473, 1, 0, 0, 0, 473, 73, 1, 0, 0, 0, 474, 472, 1, 0, 0, 0, 475, 476, 3, 54, 27, 0, 476, 477, 5, 83, 0, 0, 477, 478, 3, 54, 27, 0, 478, 75, 1, 0, 0, 0, 479, 480, 5, 1, 0, 0, 480, 481, 3, 18, 9, 0, 481, 483, 3, 94, 47, 0, 482, 484, 3, 82, 41, 0, 483, 482, 1, 0, 0, 0, 483, 484, 1, 0, 0, 0, 484, 77, 1, 0, 0, 0, 485, 486, 5, 7, 0, 0, 486, 487, 3, 18, 9, 0, 487, 488, 3, 94, 47, 0, 488, 79, 1, 0, 0, 0, 489, 490, 5, 13, 0, 0, 490, 491, 3, 52, 26, 0, 491, 81, 1, 0, 0, 0, 492, 497, 3, 84, 42, 0, 493, 494, 5, 37, 0, 0, 494, 496, 3, 84, 42, 0, 495, 493, 1, 0, 0, 0, 496, 499, 1, 0, 0, 0, 497, 495, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 83, 1, 0, 0, 0, 499, 497, 1, 0, 0, 0, 500, 501, 3, 56, 28, 0, 501, 502, 5, 35, 0, 0, 502, 503, 3, 60, 30, 0, 503, 85, 1, 0, 0, 0, 504, 505, 7, 5, 0, 0, 505, 87, 1, 0, 0, 0, 506, 509, 3, 90, 45, 0, 507, 509, 3, 92, 46, 0, 508, 506, 1, 0, 0, 0, 508, 507, 1, 0, 0, 0, 509, 89, 1, 0, 0, 0, 510, 512, 7, 0, 0, 0, 511, 510, 1, 0, 0, 0, 511, 512, 1, 0, 0, 0, 512, 513, 1, 0, 0, 0, 513, 514, 5, 31, 0, 0, 
514, 91, 1, 0, 0, 0, 515, 517, 7, 0, 0, 0, 516, 515, 1, 0, 0, 0, 516, 517, 1, 0, 0, 0, 517, 518, 1, 0, 0, 0, 518, 519, 5, 30, 0, 0, 519, 93, 1, 0, 0, 0, 520, 521, 5, 29, 0, 0, 521, 95, 1, 0, 0, 0, 522, 523, 7, 6, 0, 0, 523, 97, 1, 0, 0, 0, 524, 525, 5, 5, 0, 0, 525, 526, 3, 100, 50, 0, 526, 99, 1, 0, 0, 0, 527, 528, 5, 67, 0, 0, 528, 529, 3, 2, 1, 0, 529, 530, 5, 68, 0, 0, 530, 101, 1, 0, 0, 0, 531, 532, 5, 16, 0, 0, 532, 533, 5, 99, 0, 0, 533, 103, 1, 0, 0, 0, 534, 535, 5, 11, 0, 0, 535, 536, 5, 103, 0, 0, 536, 105, 1, 0, 0, 0, 537, 538, 5, 3, 0, 0, 538, 541, 5, 89, 0, 0, 539, 540, 5, 87, 0, 0, 540, 542, 3, 54, 27, 0, 541, 539, 1, 0, 0, 0, 541, 542, 1, 0, 0, 0, 542, 552, 1, 0, 0, 0, 543, 544, 5, 88, 0, 0, 544, 549, 3, 108, 54, 0, 545, 546, 5, 37, 0, 0, 546, 548, 3, 108, 54, 0, 547, 545, 1, 0, 0, 0, 548, 551, 1, 0, 0, 0, 549, 547, 1, 0, 0, 0, 549, 550, 1, 0, 0, 0, 550, 553, 1, 0, 0, 0, 551, 549, 1, 0, 0, 0, 552, 543, 1, 0, 0, 0, 552, 553, 1, 0, 0, 0, 553, 107, 1, 0, 0, 0, 554, 555, 3, 54, 27, 0, 555, 556, 5, 35, 0, 0, 556, 558, 1, 0, 0, 0, 557, 554, 1, 0, 0, 0, 557, 558, 1, 0, 0, 0, 558, 559, 1, 0, 0, 0, 559, 560, 3, 54, 27, 0, 560, 109, 1, 0, 0, 0, 55, 121, 130, 145, 157, 166, 174, 178, 186, 188, 193, 200, 205, 212, 218, 226, 228, 239, 246, 257, 260, 274, 282, 290, 294, 297, 307, 316, 324, 337, 341, 345, 352, 356, 362, 369, 377, 399, 410, 421, 426, 437, 442, 446, 454, 463, 472, 483, 497, 508, 511, 516, 541, 549, 552, 557] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index 1f9c13c16cdd4..7cf25b86ded5c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -18,58 +18,60 @@ public class EsqlBaseParser extends Parser { new PredictionContextCache(); 
public static final int DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, INLINESTATS=8, - KEEP=9, LIMIT=10, META=11, MV_EXPAND=12, RENAME=13, ROW=14, SHOW=15, SORT=16, - STATS=17, WHERE=18, UNKNOWN_CMD=19, LINE_COMMENT=20, MULTILINE_COMMENT=21, - WS=22, EXPLAIN_WS=23, EXPLAIN_LINE_COMMENT=24, EXPLAIN_MULTILINE_COMMENT=25, - PIPE=26, QUOTED_STRING=27, INTEGER_LITERAL=28, DECIMAL_LITERAL=29, BY=30, - AND=31, ASC=32, ASSIGN=33, CAST_OP=34, COMMA=35, DESC=36, DOT=37, FALSE=38, - FIRST=39, LAST=40, LP=41, IN=42, IS=43, LIKE=44, NOT=45, NULL=46, NULLS=47, - OR=48, PARAM=49, RLIKE=50, RP=51, TRUE=52, EQ=53, CIEQ=54, NEQ=55, LT=56, - LTE=57, GT=58, GTE=59, PLUS=60, MINUS=61, ASTERISK=62, SLASH=63, PERCENT=64, - OPENING_BRACKET=65, CLOSING_BRACKET=66, UNQUOTED_IDENTIFIER=67, QUOTED_IDENTIFIER=68, - EXPR_LINE_COMMENT=69, EXPR_MULTILINE_COMMENT=70, EXPR_WS=71, OPTIONS=72, - METADATA=73, FROM_UNQUOTED_IDENTIFIER=74, FROM_LINE_COMMENT=75, FROM_MULTILINE_COMMENT=76, - FROM_WS=77, ID_PATTERN=78, PROJECT_LINE_COMMENT=79, PROJECT_MULTILINE_COMMENT=80, - PROJECT_WS=81, AS=82, RENAME_LINE_COMMENT=83, RENAME_MULTILINE_COMMENT=84, - RENAME_WS=85, ON=86, WITH=87, ENRICH_POLICY_NAME=88, ENRICH_LINE_COMMENT=89, - ENRICH_MULTILINE_COMMENT=90, ENRICH_WS=91, ENRICH_FIELD_LINE_COMMENT=92, - ENRICH_FIELD_MULTILINE_COMMENT=93, ENRICH_FIELD_WS=94, MVEXPAND_LINE_COMMENT=95, - MVEXPAND_MULTILINE_COMMENT=96, MVEXPAND_WS=97, INFO=98, SHOW_LINE_COMMENT=99, - SHOW_MULTILINE_COMMENT=100, SHOW_WS=101, FUNCTIONS=102, META_LINE_COMMENT=103, - META_MULTILINE_COMMENT=104, META_WS=105, COLON=106, SETTING=107, SETTING_LINE_COMMENT=108, - SETTTING_MULTILINE_COMMENT=109, SETTING_WS=110; + KEEP=9, LIMIT=10, META=11, METRICS=12, MV_EXPAND=13, RENAME=14, ROW=15, + SHOW=16, SORT=17, STATS=18, WHERE=19, UNKNOWN_CMD=20, LINE_COMMENT=21, + MULTILINE_COMMENT=22, WS=23, INDEX_UNQUOTED_IDENTIFIER=24, EXPLAIN_WS=25, + EXPLAIN_LINE_COMMENT=26, EXPLAIN_MULTILINE_COMMENT=27, PIPE=28, QUOTED_STRING=29, + 
INTEGER_LITERAL=30, DECIMAL_LITERAL=31, BY=32, AND=33, ASC=34, ASSIGN=35, + CAST_OP=36, COMMA=37, DESC=38, DOT=39, FALSE=40, FIRST=41, LAST=42, LP=43, + IN=44, IS=45, LIKE=46, NOT=47, NULL=48, NULLS=49, OR=50, PARAM=51, RLIKE=52, + RP=53, TRUE=54, EQ=55, CIEQ=56, NEQ=57, LT=58, LTE=59, GT=60, GTE=61, + PLUS=62, MINUS=63, ASTERISK=64, SLASH=65, PERCENT=66, OPENING_BRACKET=67, + CLOSING_BRACKET=68, UNQUOTED_IDENTIFIER=69, QUOTED_IDENTIFIER=70, EXPR_LINE_COMMENT=71, + EXPR_MULTILINE_COMMENT=72, EXPR_WS=73, OPTIONS=74, METADATA=75, FROM_LINE_COMMENT=76, + FROM_MULTILINE_COMMENT=77, FROM_WS=78, ID_PATTERN=79, PROJECT_LINE_COMMENT=80, + PROJECT_MULTILINE_COMMENT=81, PROJECT_WS=82, AS=83, RENAME_LINE_COMMENT=84, + RENAME_MULTILINE_COMMENT=85, RENAME_WS=86, ON=87, WITH=88, ENRICH_POLICY_NAME=89, + ENRICH_LINE_COMMENT=90, ENRICH_MULTILINE_COMMENT=91, ENRICH_WS=92, ENRICH_FIELD_LINE_COMMENT=93, + ENRICH_FIELD_MULTILINE_COMMENT=94, ENRICH_FIELD_WS=95, MVEXPAND_LINE_COMMENT=96, + MVEXPAND_MULTILINE_COMMENT=97, MVEXPAND_WS=98, INFO=99, SHOW_LINE_COMMENT=100, + SHOW_MULTILINE_COMMENT=101, SHOW_WS=102, FUNCTIONS=103, META_LINE_COMMENT=104, + META_MULTILINE_COMMENT=105, META_WS=106, COLON=107, SETTING=108, SETTING_LINE_COMMENT=109, + SETTTING_MULTILINE_COMMENT=110, SETTING_WS=111, METRICS_LINE_COMMENT=112, + METRICS_MULTILINE_COMMENT=113, METRICS_WS=114, CLOSING_METRICS_LINE_COMMENT=115, + CLOSING_METRICS_MULTILINE_COMMENT=116, CLOSING_METRICS_WS=117; public static final int RULE_singleStatement = 0, RULE_query = 1, RULE_sourceCommand = 2, RULE_processingCommand = 3, RULE_whereCommand = 4, RULE_booleanExpression = 5, RULE_regexBooleanExpression = 6, RULE_valueExpression = 7, RULE_operatorExpression = 8, RULE_primaryExpression = 9, RULE_functionExpression = 10, RULE_dataType = 11, RULE_rowCommand = 12, - RULE_fields = 13, RULE_field = 14, RULE_fromCommand = 15, RULE_fromIdentifier = 16, + RULE_fields = 13, RULE_field = 14, RULE_fromCommand = 15, RULE_indexIdentifier = 16, 
RULE_fromOptions = 17, RULE_configOption = 18, RULE_metadata = 19, RULE_metadataOption = 20, - RULE_deprecated_metadata = 21, RULE_evalCommand = 22, RULE_statsCommand = 23, - RULE_inlinestatsCommand = 24, RULE_qualifiedName = 25, RULE_qualifiedNamePattern = 26, - RULE_identifier = 27, RULE_identifierPattern = 28, RULE_constant = 29, - RULE_limitCommand = 30, RULE_sortCommand = 31, RULE_orderExpression = 32, - RULE_keepCommand = 33, RULE_dropCommand = 34, RULE_renameCommand = 35, - RULE_renameClause = 36, RULE_dissectCommand = 37, RULE_grokCommand = 38, - RULE_mvExpandCommand = 39, RULE_commandOptions = 40, RULE_commandOption = 41, - RULE_booleanValue = 42, RULE_numericValue = 43, RULE_decimalValue = 44, - RULE_integerValue = 45, RULE_string = 46, RULE_comparisonOperator = 47, - RULE_explainCommand = 48, RULE_subqueryExpression = 49, RULE_showCommand = 50, - RULE_metaCommand = 51, RULE_enrichCommand = 52, RULE_enrichWithClause = 53; + RULE_deprecated_metadata = 21, RULE_metricsCommand = 22, RULE_evalCommand = 23, + RULE_statsCommand = 24, RULE_inlinestatsCommand = 25, RULE_qualifiedName = 26, + RULE_qualifiedNamePattern = 27, RULE_identifier = 28, RULE_identifierPattern = 29, + RULE_constant = 30, RULE_limitCommand = 31, RULE_sortCommand = 32, RULE_orderExpression = 33, + RULE_keepCommand = 34, RULE_dropCommand = 35, RULE_renameCommand = 36, + RULE_renameClause = 37, RULE_dissectCommand = 38, RULE_grokCommand = 39, + RULE_mvExpandCommand = 40, RULE_commandOptions = 41, RULE_commandOption = 42, + RULE_booleanValue = 43, RULE_numericValue = 44, RULE_decimalValue = 45, + RULE_integerValue = 46, RULE_string = 47, RULE_comparisonOperator = 48, + RULE_explainCommand = 49, RULE_subqueryExpression = 50, RULE_showCommand = 51, + RULE_metaCommand = 52, RULE_enrichCommand = 53, RULE_enrichWithClause = 54; private static String[] makeRuleNames() { return new String[] { "singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand", "booleanExpression", 
"regexBooleanExpression", "valueExpression", "operatorExpression", "primaryExpression", "functionExpression", "dataType", "rowCommand", - "fields", "field", "fromCommand", "fromIdentifier", "fromOptions", "configOption", - "metadata", "metadataOption", "deprecated_metadata", "evalCommand", "statsCommand", - "inlinestatsCommand", "qualifiedName", "qualifiedNamePattern", "identifier", - "identifierPattern", "constant", "limitCommand", "sortCommand", "orderExpression", - "keepCommand", "dropCommand", "renameCommand", "renameClause", "dissectCommand", - "grokCommand", "mvExpandCommand", "commandOptions", "commandOption", - "booleanValue", "numericValue", "decimalValue", "integerValue", "string", - "comparisonOperator", "explainCommand", "subqueryExpression", "showCommand", - "metaCommand", "enrichCommand", "enrichWithClause" + "fields", "field", "fromCommand", "indexIdentifier", "fromOptions", "configOption", + "metadata", "metadataOption", "deprecated_metadata", "metricsCommand", + "evalCommand", "statsCommand", "inlinestatsCommand", "qualifiedName", + "qualifiedNamePattern", "identifier", "identifierPattern", "constant", + "limitCommand", "sortCommand", "orderExpression", "keepCommand", "dropCommand", + "renameCommand", "renameClause", "dissectCommand", "grokCommand", "mvExpandCommand", + "commandOptions", "commandOption", "booleanValue", "numericValue", "decimalValue", + "integerValue", "string", "comparisonOperator", "explainCommand", "subqueryExpression", + "showCommand", "metaCommand", "enrichCommand", "enrichWithClause" }; } public static final String[] ruleNames = makeRuleNames(); @@ -77,15 +79,15 @@ private static String[] makeRuleNames() { private static String[] makeLiteralNames() { return new String[] { null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", - "'grok'", "'inlinestats'", "'keep'", "'limit'", "'meta'", "'mv_expand'", - "'rename'", "'row'", "'show'", "'sort'", "'stats'", "'where'", null, - null, null, null, null, null, null, 
"'|'", null, null, null, "'by'", - "'and'", "'asc'", "'='", "'::'", "','", "'desc'", "'.'", "'false'", "'first'", - "'last'", "'('", "'in'", "'is'", "'like'", "'not'", "'null'", "'nulls'", - "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", - "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, "']'", - null, null, null, null, null, "'options'", "'metadata'", null, null, - null, null, null, null, null, null, "'as'", null, null, null, "'on'", + "'grok'", "'inlinestats'", "'keep'", "'limit'", "'meta'", "'metrics'", + "'mv_expand'", "'rename'", "'row'", "'show'", "'sort'", "'stats'", "'where'", + null, null, null, null, null, null, null, null, "'|'", null, null, null, + "'by'", "'and'", "'asc'", "'='", "'::'", "','", "'desc'", "'.'", "'false'", + "'first'", "'last'", "'('", "'in'", "'is'", "'like'", "'not'", "'null'", + "'nulls'", "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", + "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", + null, "']'", null, null, null, null, null, "'options'", "'metadata'", + null, null, null, null, null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, null, null, null, null, null, null, null, null, "'info'", null, null, null, "'functions'", null, null, null, "':'" }; @@ -94,25 +96,28 @@ private static String[] makeLiteralNames() { private static String[] makeSymbolicNames() { return new String[] { null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", - "INLINESTATS", "KEEP", "LIMIT", "META", "MV_EXPAND", "RENAME", "ROW", - "SHOW", "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", - "WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", - "PIPE", "QUOTED_STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", - "AND", "ASC", "ASSIGN", "CAST_OP", "COMMA", "DESC", "DOT", "FALSE", "FIRST", - "LAST", "LP", "IN", "IS", "LIKE", "NOT", "NULL", "NULLS", "OR", "PARAM", - "RLIKE", "RP", "TRUE", "EQ", 
"CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", - "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", - "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", - "EXPR_WS", "OPTIONS", "METADATA", "FROM_UNQUOTED_IDENTIFIER", "FROM_LINE_COMMENT", - "FROM_MULTILINE_COMMENT", "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", - "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", - "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", - "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", + "INLINESTATS", "KEEP", "LIMIT", "META", "METRICS", "MV_EXPAND", "RENAME", + "ROW", "SHOW", "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", + "MULTILINE_COMMENT", "WS", "INDEX_UNQUOTED_IDENTIFIER", "EXPLAIN_WS", + "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "PIPE", "QUOTED_STRING", + "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", + "COMMA", "DESC", "DOT", "FALSE", "FIRST", "LAST", "LP", "IN", "IS", "LIKE", + "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", + "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", + "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", + "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", + "OPTIONS", "METADATA", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", + "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", + "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", + "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", + "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", "SHOW_MULTILINE_COMMENT", "SHOW_WS", "FUNCTIONS", "META_LINE_COMMENT", 
"META_MULTILINE_COMMENT", "META_WS", "COLON", "SETTING", "SETTING_LINE_COMMENT", - "SETTTING_MULTILINE_COMMENT", "SETTING_WS" + "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", + "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", + "CLOSING_METRICS_WS" }; } private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames(); @@ -199,9 +204,9 @@ public final SingleStatementContext singleStatement() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(108); + setState(110); query(0); - setState(109); + setState(111); match(EOF); } } @@ -297,11 +302,11 @@ private QueryContext query(int _p) throws RecognitionException { _ctx = _localctx; _prevctx = _localctx; - setState(112); + setState(114); sourceCommand(); } _ctx.stop = _input.LT(-1); - setState(119); + setState(121); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -312,16 +317,16 @@ private QueryContext query(int _p) throws RecognitionException { { _localctx = new CompositeQueryContext(new QueryContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_query); - setState(114); + setState(116); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(115); + setState(117); match(PIPE); - setState(116); + setState(118); processingCommand(); } } } - setState(121); + setState(123); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); } @@ -349,6 +354,9 @@ public FromCommandContext fromCommand() { public RowCommandContext rowCommand() { return getRuleContext(RowCommandContext.class,0); } + public MetricsCommandContext metricsCommand() { + return getRuleContext(MetricsCommandContext.class,0); + } public ShowCommandContext showCommand() { return getRuleContext(ShowCommandContext.class,0); } @@ -379,41 +387,48 @@ 
public final SourceCommandContext sourceCommand() throws RecognitionException { SourceCommandContext _localctx = new SourceCommandContext(_ctx, getState()); enterRule(_localctx, 4, RULE_sourceCommand); try { - setState(127); + setState(130); _errHandler.sync(this); switch (_input.LA(1)) { case EXPLAIN: enterOuterAlt(_localctx, 1); { - setState(122); + setState(124); explainCommand(); } break; case FROM: enterOuterAlt(_localctx, 2); { - setState(123); + setState(125); fromCommand(); } break; case ROW: enterOuterAlt(_localctx, 3); { - setState(124); + setState(126); rowCommand(); } break; - case SHOW: + case METRICS: enterOuterAlt(_localctx, 4); { - setState(125); + setState(127); + metricsCommand(); + } + break; + case SHOW: + enterOuterAlt(_localctx, 5); + { + setState(128); showCommand(); } break; case META: - enterOuterAlt(_localctx, 5); + enterOuterAlt(_localctx, 6); { - setState(126); + setState(129); metaCommand(); } break; @@ -497,97 +512,97 @@ public final ProcessingCommandContext processingCommand() throws RecognitionExce ProcessingCommandContext _localctx = new ProcessingCommandContext(_ctx, getState()); enterRule(_localctx, 6, RULE_processingCommand); try { - setState(142); + setState(145); _errHandler.sync(this); switch (_input.LA(1)) { case EVAL: enterOuterAlt(_localctx, 1); { - setState(129); + setState(132); evalCommand(); } break; case INLINESTATS: enterOuterAlt(_localctx, 2); { - setState(130); + setState(133); inlinestatsCommand(); } break; case LIMIT: enterOuterAlt(_localctx, 3); { - setState(131); + setState(134); limitCommand(); } break; case KEEP: enterOuterAlt(_localctx, 4); { - setState(132); + setState(135); keepCommand(); } break; case SORT: enterOuterAlt(_localctx, 5); { - setState(133); + setState(136); sortCommand(); } break; case STATS: enterOuterAlt(_localctx, 6); { - setState(134); + setState(137); statsCommand(); } break; case WHERE: enterOuterAlt(_localctx, 7); { - setState(135); + setState(138); whereCommand(); } break; case DROP: 
enterOuterAlt(_localctx, 8); { - setState(136); + setState(139); dropCommand(); } break; case RENAME: enterOuterAlt(_localctx, 9); { - setState(137); + setState(140); renameCommand(); } break; case DISSECT: enterOuterAlt(_localctx, 10); { - setState(138); + setState(141); dissectCommand(); } break; case GROK: enterOuterAlt(_localctx, 11); { - setState(139); + setState(142); grokCommand(); } break; case ENRICH: enterOuterAlt(_localctx, 12); { - setState(140); + setState(143); enrichCommand(); } break; case MV_EXPAND: enterOuterAlt(_localctx, 13); { - setState(141); + setState(144); mvExpandCommand(); } break; @@ -638,9 +653,9 @@ public final WhereCommandContext whereCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(144); + setState(147); match(WHERE); - setState(145); + setState(148); booleanExpression(0); } } @@ -835,7 +850,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(175); + setState(178); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) { case 1: @@ -844,9 +859,9 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(148); + setState(151); match(NOT); - setState(149); + setState(152); booleanExpression(7); } break; @@ -855,7 +870,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new BooleanDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(150); + setState(153); valueExpression(); } break; @@ -864,7 +879,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new RegexExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(151); + setState(154); regexBooleanExpression(); } break; @@ -873,41 +888,41 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc 
_localctx = new LogicalInContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(152); + setState(155); valueExpression(); - setState(154); + setState(157); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(153); + setState(156); match(NOT); } } - setState(156); + setState(159); match(IN); - setState(157); + setState(160); match(LP); - setState(158); + setState(161); valueExpression(); - setState(163); + setState(166); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(159); + setState(162); match(COMMA); - setState(160); + setState(163); valueExpression(); } } - setState(165); + setState(168); _errHandler.sync(this); _la = _input.LA(1); } - setState(166); + setState(169); match(RP); } break; @@ -916,27 +931,27 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new IsNullContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(168); + setState(171); valueExpression(); - setState(169); + setState(172); match(IS); - setState(171); + setState(174); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(170); + setState(173); match(NOT); } } - setState(173); + setState(176); match(NULL); } break; } _ctx.stop = _input.LT(-1); - setState(185); + setState(188); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -944,7 +959,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(183); + setState(186); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) { case 1: @@ -952,11 +967,11 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); 
((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(177); + setState(180); if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(178); + setState(181); ((LogicalBinaryContext)_localctx).operator = match(AND); - setState(179); + setState(182); ((LogicalBinaryContext)_localctx).right = booleanExpression(5); } break; @@ -965,18 +980,18 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(180); + setState(183); if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); - setState(181); + setState(184); ((LogicalBinaryContext)_localctx).operator = match(OR); - setState(182); + setState(185); ((LogicalBinaryContext)_localctx).right = booleanExpression(4); } break; } } } - setState(187); + setState(190); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); } @@ -1031,48 +1046,48 @@ public final RegexBooleanExpressionContext regexBooleanExpression() throws Recog enterRule(_localctx, 12, RULE_regexBooleanExpression); int _la; try { - setState(202); + setState(205); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(188); + setState(191); valueExpression(); - setState(190); + setState(193); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(189); + setState(192); match(NOT); } } - setState(192); + setState(195); ((RegexBooleanExpressionContext)_localctx).kind = match(LIKE); - setState(193); + setState(196); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(195); 
+ setState(198); valueExpression(); - setState(197); + setState(200); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(196); + setState(199); match(NOT); } } - setState(199); + setState(202); ((RegexBooleanExpressionContext)_localctx).kind = match(RLIKE); - setState(200); + setState(203); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; @@ -1158,14 +1173,14 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio ValueExpressionContext _localctx = new ValueExpressionContext(_ctx, getState()); enterRule(_localctx, 14, RULE_valueExpression); try { - setState(209); + setState(212); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) { case 1: _localctx = new ValueExpressionDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(204); + setState(207); operatorExpression(0); } break; @@ -1173,11 +1188,11 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio _localctx = new ComparisonContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(205); + setState(208); ((ComparisonContext)_localctx).left = operatorExpression(0); - setState(206); + setState(209); comparisonOperator(); - setState(207); + setState(210); ((ComparisonContext)_localctx).right = operatorExpression(0); } break; @@ -1302,7 +1317,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE int _alt; enterOuterAlt(_localctx, 1); { - setState(215); + setState(218); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) { case 1: @@ -1311,7 +1326,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _ctx = _localctx; _prevctx = _localctx; - setState(212); + setState(215); primaryExpression(0); } break; @@ -1320,7 +1335,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; 
_prevctx = _localctx; - setState(213); + setState(216); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1331,13 +1346,13 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(214); + setState(217); operatorExpression(3); } break; } _ctx.stop = _input.LT(-1); - setState(225); + setState(228); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1345,7 +1360,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(223); + setState(226); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: @@ -1353,12 +1368,12 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(217); + setState(220); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(218); + setState(221); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); - if ( !(((((_la - 62)) & ~0x3f) == 0 && ((1L << (_la - 62)) & 7L) != 0)) ) { + if ( !(((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & 7L) != 0)) ) { ((ArithmeticBinaryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); } else { @@ -1366,7 +1381,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(219); + setState(222); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(3); } break; 
@@ -1375,9 +1390,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(220); + setState(223); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(221); + setState(224); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1388,14 +1403,14 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(222); + setState(225); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(2); } break; } } } - setState(227); + setState(230); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); } @@ -1553,7 +1568,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(236); + setState(239); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,16,_ctx) ) { case 1: @@ -1562,7 +1577,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(229); + setState(232); constant(); } break; @@ -1571,7 +1586,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new DereferenceContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(230); + setState(233); qualifiedName(); } break; @@ -1580,7 +1595,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new FunctionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(231); + setState(234); functionExpression(); } break; @@ -1589,17 +1604,17 @@ private 
PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new ParenthesizedExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(232); + setState(235); match(LP); - setState(233); + setState(236); booleanExpression(0); - setState(234); + setState(237); match(RP); } break; } _ctx.stop = _input.LT(-1); - setState(243); + setState(246); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,17,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1610,16 +1625,16 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc { _localctx = new InlineCastContext(new PrimaryExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_primaryExpression); - setState(238); + setState(241); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(239); + setState(242); match(CAST_OP); - setState(240); + setState(243); dataType(); } } } - setState(245); + setState(248); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,17,_ctx); } @@ -1681,16 +1696,16 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(246); + setState(249); identifier(); - setState(247); + setState(250); match(LP); - setState(257); + setState(260); _errHandler.sync(this); switch (_input.LA(1)) { case ASTERISK: { - setState(248); + setState(251); match(ASTERISK); } break; @@ -1710,21 +1725,21 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx case QUOTED_IDENTIFIER: { { - setState(249); + setState(252); booleanExpression(0); - setState(254); + setState(257); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(250); + setState(253); match(COMMA); - setState(251); + setState(254); booleanExpression(0); } } - setState(256); + setState(259); 
_errHandler.sync(this); _la = _input.LA(1); } @@ -1736,7 +1751,7 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx default: break; } - setState(259); + setState(262); match(RP); } } @@ -1794,7 +1809,7 @@ public final DataTypeContext dataType() throws RecognitionException { _localctx = new ToDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(261); + setState(264); identifier(); } } @@ -1841,9 +1856,9 @@ public final RowCommandContext rowCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(263); + setState(266); match(ROW); - setState(264); + setState(267); fields(); } } @@ -1897,23 +1912,23 @@ public final FieldsContext fields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(266); + setState(269); field(); - setState(271); + setState(274); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,20,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(267); + setState(270); match(COMMA); - setState(268); + setState(271); field(); } } } - setState(273); + setState(276); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,20,_ctx); } @@ -1963,24 +1978,24 @@ public final FieldContext field() throws RecognitionException { FieldContext _localctx = new FieldContext(_ctx, getState()); enterRule(_localctx, 28, RULE_field); try { - setState(279); + setState(282); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,21,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(274); + setState(277); booleanExpression(0); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(275); + setState(278); qualifiedName(); - setState(276); + setState(279); match(ASSIGN); - setState(277); + setState(280); booleanExpression(0); } break; @@ -2000,11 +2015,11 @@ public final FieldContext field() throws RecognitionException { 
@SuppressWarnings("CheckReturnValue") public static class FromCommandContext extends ParserRuleContext { public TerminalNode FROM() { return getToken(EsqlBaseParser.FROM, 0); } - public List fromIdentifier() { - return getRuleContexts(FromIdentifierContext.class); + public List indexIdentifier() { + return getRuleContexts(IndexIdentifierContext.class); } - public FromIdentifierContext fromIdentifier(int i) { - return getRuleContext(FromIdentifierContext.class,i); + public IndexIdentifierContext indexIdentifier(int i) { + return getRuleContext(IndexIdentifierContext.class,i); } public List COMMA() { return getTokens(EsqlBaseParser.COMMA); } public TerminalNode COMMA(int i) { @@ -2043,44 +2058,44 @@ public final FromCommandContext fromCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(281); + setState(284); match(FROM); - setState(282); - fromIdentifier(); - setState(287); + setState(285); + indexIdentifier(); + setState(290); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,22,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(283); + setState(286); match(COMMA); - setState(284); - fromIdentifier(); + setState(287); + indexIdentifier(); } } } - setState(289); + setState(292); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,22,_ctx); } - setState(291); + setState(294); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,23,_ctx) ) { case 1: { - setState(290); + setState(293); metadata(); } break; } - setState(294); + setState(297); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { case 1: { - setState(293); + setState(296); fromOptions(); } break; @@ -2099,46 +2114,36 @@ public final FromCommandContext fromCommand() throws RecognitionException { } @SuppressWarnings("CheckReturnValue") - public static class FromIdentifierContext extends ParserRuleContext { - 
public TerminalNode FROM_UNQUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.FROM_UNQUOTED_IDENTIFIER, 0); } - public TerminalNode QUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.QUOTED_IDENTIFIER, 0); } + public static class IndexIdentifierContext extends ParserRuleContext { + public TerminalNode INDEX_UNQUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.INDEX_UNQUOTED_IDENTIFIER, 0); } @SuppressWarnings("this-escape") - public FromIdentifierContext(ParserRuleContext parent, int invokingState) { + public IndexIdentifierContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } - @Override public int getRuleIndex() { return RULE_fromIdentifier; } + @Override public int getRuleIndex() { return RULE_indexIdentifier; } @Override public void enterRule(ParseTreeListener listener) { - if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterFromIdentifier(this); + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterIndexIdentifier(this); } @Override public void exitRule(ParseTreeListener listener) { - if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitFromIdentifier(this); + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitIndexIdentifier(this); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitFromIdentifier(this); + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitIndexIdentifier(this); else return visitor.visitChildren(this); } } - public final FromIdentifierContext fromIdentifier() throws RecognitionException { - FromIdentifierContext _localctx = new FromIdentifierContext(_ctx, getState()); - enterRule(_localctx, 32, RULE_fromIdentifier); - int _la; + public final IndexIdentifierContext indexIdentifier() throws RecognitionException { + 
IndexIdentifierContext _localctx = new IndexIdentifierContext(_ctx, getState()); + enterRule(_localctx, 32, RULE_indexIdentifier); try { enterOuterAlt(_localctx, 1); { - setState(296); - _la = _input.LA(1); - if ( !(_la==QUOTED_IDENTIFIER || _la==FROM_UNQUOTED_IDENTIFIER) ) { - _errHandler.recoverInline(this); - } - else { - if ( _input.LA(1)==Token.EOF ) matchedEOF = true; - _errHandler.reportMatch(this); - consume(); - } + setState(299); + match(INDEX_UNQUOTED_IDENTIFIER); } } catch (RecognitionException re) { @@ -2192,25 +2197,25 @@ public final FromOptionsContext fromOptions() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(298); + setState(301); match(OPTIONS); - setState(299); + setState(302); configOption(); - setState(304); + setState(307); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,25,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(300); + setState(303); match(COMMA); - setState(301); + setState(304); configOption(); } } } - setState(306); + setState(309); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,25,_ctx); } @@ -2262,11 +2267,11 @@ public final ConfigOptionContext configOption() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(307); + setState(310); string(); - setState(308); + setState(311); match(ASSIGN); - setState(309); + setState(312); string(); } } @@ -2313,20 +2318,20 @@ public final MetadataContext metadata() throws RecognitionException { MetadataContext _localctx = new MetadataContext(_ctx, getState()); enterRule(_localctx, 38, RULE_metadata); try { - setState(313); + setState(316); _errHandler.sync(this); switch (_input.LA(1)) { case METADATA: enterOuterAlt(_localctx, 1); { - setState(311); + setState(314); metadataOption(); } break; case OPENING_BRACKET: enterOuterAlt(_localctx, 2); { - setState(312); + setState(315); deprecated_metadata(); } break; @@ 
-2348,11 +2353,11 @@ public final MetadataContext metadata() throws RecognitionException { @SuppressWarnings("CheckReturnValue") public static class MetadataOptionContext extends ParserRuleContext { public TerminalNode METADATA() { return getToken(EsqlBaseParser.METADATA, 0); } - public List fromIdentifier() { - return getRuleContexts(FromIdentifierContext.class); + public List indexIdentifier() { + return getRuleContexts(IndexIdentifierContext.class); } - public FromIdentifierContext fromIdentifier(int i) { - return getRuleContext(FromIdentifierContext.class,i); + public IndexIdentifierContext indexIdentifier(int i) { + return getRuleContext(IndexIdentifierContext.class,i); } public List COMMA() { return getTokens(EsqlBaseParser.COMMA); } public TerminalNode COMMA(int i) { @@ -2385,25 +2390,25 @@ public final MetadataOptionContext metadataOption() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(315); + setState(318); match(METADATA); - setState(316); - fromIdentifier(); - setState(321); + setState(319); + indexIdentifier(); + setState(324); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,27,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(317); + setState(320); match(COMMA); - setState(318); - fromIdentifier(); + setState(321); + indexIdentifier(); } } } - setState(323); + setState(326); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,27,_ctx); } @@ -2452,11 +2457,11 @@ public final Deprecated_metadataContext deprecated_metadata() throws Recognition try { enterOuterAlt(_localctx, 1); { - setState(324); + setState(327); match(OPENING_BRACKET); - setState(325); + setState(328); metadataOption(); - setState(326); + setState(329); match(CLOSING_BRACKET); } } @@ -2471,6 +2476,112 @@ public final Deprecated_metadataContext deprecated_metadata() throws Recognition return _localctx; } + @SuppressWarnings("CheckReturnValue") + 
public static class MetricsCommandContext extends ParserRuleContext { + public FieldsContext aggregates; + public FieldsContext grouping; + public TerminalNode METRICS() { return getToken(EsqlBaseParser.METRICS, 0); } + public List indexIdentifier() { + return getRuleContexts(IndexIdentifierContext.class); + } + public IndexIdentifierContext indexIdentifier(int i) { + return getRuleContext(IndexIdentifierContext.class,i); + } + public List COMMA() { return getTokens(EsqlBaseParser.COMMA); } + public TerminalNode COMMA(int i) { + return getToken(EsqlBaseParser.COMMA, i); + } + public TerminalNode BY() { return getToken(EsqlBaseParser.BY, 0); } + public List fields() { + return getRuleContexts(FieldsContext.class); + } + public FieldsContext fields(int i) { + return getRuleContext(FieldsContext.class,i); + } + @SuppressWarnings("this-escape") + public MetricsCommandContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_metricsCommand; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterMetricsCommand(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitMetricsCommand(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitMetricsCommand(this); + else return visitor.visitChildren(this); + } + } + + public final MetricsCommandContext metricsCommand() throws RecognitionException { + MetricsCommandContext _localctx = new MetricsCommandContext(_ctx, getState()); + enterRule(_localctx, 44, RULE_metricsCommand); + try { + int _alt; + enterOuterAlt(_localctx, 1); + { + setState(331); + match(METRICS); + setState(332); + indexIdentifier(); + setState(337); + 
_errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,28,_ctx); + while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { + if ( _alt==1 ) { + { + { + setState(333); + match(COMMA); + setState(334); + indexIdentifier(); + } + } + } + setState(339); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,28,_ctx); + } + setState(341); + _errHandler.sync(this); + switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { + case 1: + { + setState(340); + ((MetricsCommandContext)_localctx).aggregates = fields(); + } + break; + } + setState(345); + _errHandler.sync(this); + switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { + case 1: + { + setState(343); + match(BY); + setState(344); + ((MetricsCommandContext)_localctx).grouping = fields(); + } + break; + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + @SuppressWarnings("CheckReturnValue") public static class EvalCommandContext extends ParserRuleContext { public TerminalNode EVAL() { return getToken(EsqlBaseParser.EVAL, 0); } @@ -2499,13 +2610,13 @@ public T accept(ParseTreeVisitor visitor) { public final EvalCommandContext evalCommand() throws RecognitionException { EvalCommandContext _localctx = new EvalCommandContext(_ctx, getState()); - enterRule(_localctx, 44, RULE_evalCommand); + enterRule(_localctx, 46, RULE_evalCommand); try { enterOuterAlt(_localctx, 1); { - setState(328); + setState(347); match(EVAL); - setState(329); + setState(348); fields(); } } @@ -2554,30 +2665,30 @@ public T accept(ParseTreeVisitor visitor) { public final StatsCommandContext statsCommand() throws RecognitionException { StatsCommandContext _localctx = new StatsCommandContext(_ctx, getState()); - enterRule(_localctx, 46, RULE_statsCommand); + enterRule(_localctx, 48, RULE_statsCommand); try { 
enterOuterAlt(_localctx, 1); { - setState(331); + setState(350); match(STATS); - setState(333); + setState(352); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,28,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { case 1: { - setState(332); + setState(351); ((StatsCommandContext)_localctx).stats = fields(); } break; } - setState(337); + setState(356); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,32,_ctx) ) { case 1: { - setState(335); + setState(354); match(BY); - setState(336); + setState(355); ((StatsCommandContext)_localctx).grouping = fields(); } break; @@ -2629,22 +2740,22 @@ public T accept(ParseTreeVisitor visitor) { public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionException { InlinestatsCommandContext _localctx = new InlinestatsCommandContext(_ctx, getState()); - enterRule(_localctx, 48, RULE_inlinestatsCommand); + enterRule(_localctx, 50, RULE_inlinestatsCommand); try { enterOuterAlt(_localctx, 1); { - setState(339); + setState(358); match(INLINESTATS); - setState(340); + setState(359); ((InlinestatsCommandContext)_localctx).stats = fields(); - setState(343); + setState(362); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,33,_ctx) ) { case 1: { - setState(341); + setState(360); match(BY); - setState(342); + setState(361); ((InlinestatsCommandContext)_localctx).grouping = fields(); } break; @@ -2696,30 +2807,30 @@ public T accept(ParseTreeVisitor visitor) { public final QualifiedNameContext qualifiedName() throws RecognitionException { QualifiedNameContext _localctx = new QualifiedNameContext(_ctx, getState()); - enterRule(_localctx, 50, RULE_qualifiedName); + enterRule(_localctx, 52, RULE_qualifiedName); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(345); + setState(364); 
identifier(); - setState(350); + setState(369); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,31,_ctx); + _alt = getInterpreter().adaptivePredict(_input,34,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(346); + setState(365); match(DOT); - setState(347); + setState(366); identifier(); } } } - setState(352); + setState(371); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,31,_ctx); + _alt = getInterpreter().adaptivePredict(_input,34,_ctx); } } } @@ -2768,30 +2879,30 @@ public T accept(ParseTreeVisitor visitor) { public final QualifiedNamePatternContext qualifiedNamePattern() throws RecognitionException { QualifiedNamePatternContext _localctx = new QualifiedNamePatternContext(_ctx, getState()); - enterRule(_localctx, 52, RULE_qualifiedNamePattern); + enterRule(_localctx, 54, RULE_qualifiedNamePattern); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(353); + setState(372); identifierPattern(); - setState(358); + setState(377); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,32,_ctx); + _alt = getInterpreter().adaptivePredict(_input,35,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(354); + setState(373); match(DOT); - setState(355); + setState(374); identifierPattern(); } } } - setState(360); + setState(379); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,32,_ctx); + _alt = getInterpreter().adaptivePredict(_input,35,_ctx); } } } @@ -2832,12 +2943,12 @@ public T accept(ParseTreeVisitor visitor) { public final IdentifierContext identifier() throws RecognitionException { IdentifierContext _localctx = new IdentifierContext(_ctx, getState()); - enterRule(_localctx, 54, RULE_identifier); + enterRule(_localctx, 56, RULE_identifier); int _la; try { enterOuterAlt(_localctx, 1); { - setState(361); + setState(380); _la = 
_input.LA(1); if ( !(_la==UNQUOTED_IDENTIFIER || _la==QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -2885,11 +2996,11 @@ public T accept(ParseTreeVisitor visitor) { public final IdentifierPatternContext identifierPattern() throws RecognitionException { IdentifierPatternContext _localctx = new IdentifierPatternContext(_ctx, getState()); - enterRule(_localctx, 56, RULE_identifierPattern); + enterRule(_localctx, 58, RULE_identifierPattern); try { enterOuterAlt(_localctx, 1); { - setState(363); + setState(382); match(ID_PATTERN); } } @@ -3155,17 +3266,17 @@ public T accept(ParseTreeVisitor visitor) { public final ConstantContext constant() throws RecognitionException { ConstantContext _localctx = new ConstantContext(_ctx, getState()); - enterRule(_localctx, 58, RULE_constant); + enterRule(_localctx, 60, RULE_constant); int _la; try { - setState(407); + setState(426); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,36,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,39,_ctx) ) { case 1: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(365); + setState(384); match(NULL); } break; @@ -3173,9 +3284,9 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new QualifiedIntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(366); + setState(385); integerValue(); - setState(367); + setState(386); match(UNQUOTED_IDENTIFIER); } break; @@ -3183,7 +3294,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(369); + setState(388); decimalValue(); } break; @@ -3191,7 +3302,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(370); + setState(389); integerValue(); } break; @@ -3199,7 +3310,7 @@ public final 
ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(371); + setState(390); booleanValue(); } break; @@ -3207,7 +3318,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new InputParamContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(372); + setState(391); match(PARAM); } break; @@ -3215,7 +3326,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(373); + setState(392); string(); } break; @@ -3223,27 +3334,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new NumericArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(374); + setState(393); match(OPENING_BRACKET); - setState(375); + setState(394); numericValue(); - setState(380); + setState(399); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(376); + setState(395); match(COMMA); - setState(377); + setState(396); numericValue(); } } - setState(382); + setState(401); _errHandler.sync(this); _la = _input.LA(1); } - setState(383); + setState(402); match(CLOSING_BRACKET); } break; @@ -3251,27 +3362,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(385); + setState(404); match(OPENING_BRACKET); - setState(386); + setState(405); booleanValue(); - setState(391); + setState(410); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(387); + setState(406); match(COMMA); - setState(388); + setState(407); booleanValue(); } } - setState(393); + setState(412); _errHandler.sync(this); _la = _input.LA(1); } - setState(394); + setState(413); match(CLOSING_BRACKET); } break; @@ -3279,27 +3390,27 @@ public final ConstantContext constant() throws 
RecognitionException { _localctx = new StringArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(396); + setState(415); match(OPENING_BRACKET); - setState(397); + setState(416); string(); - setState(402); + setState(421); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(398); + setState(417); match(COMMA); - setState(399); + setState(418); string(); } } - setState(404); + setState(423); _errHandler.sync(this); _la = _input.LA(1); } - setState(405); + setState(424); match(CLOSING_BRACKET); } break; @@ -3342,13 +3453,13 @@ public T accept(ParseTreeVisitor visitor) { public final LimitCommandContext limitCommand() throws RecognitionException { LimitCommandContext _localctx = new LimitCommandContext(_ctx, getState()); - enterRule(_localctx, 60, RULE_limitCommand); + enterRule(_localctx, 62, RULE_limitCommand); try { enterOuterAlt(_localctx, 1); { - setState(409); + setState(428); match(LIMIT); - setState(410); + setState(429); match(INTEGER_LITERAL); } } @@ -3398,32 +3509,32 @@ public T accept(ParseTreeVisitor visitor) { public final SortCommandContext sortCommand() throws RecognitionException { SortCommandContext _localctx = new SortCommandContext(_ctx, getState()); - enterRule(_localctx, 62, RULE_sortCommand); + enterRule(_localctx, 64, RULE_sortCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(412); + setState(431); match(SORT); - setState(413); + setState(432); orderExpression(); - setState(418); + setState(437); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,37,_ctx); + _alt = getInterpreter().adaptivePredict(_input,40,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(414); + setState(433); match(COMMA); - setState(415); + setState(434); orderExpression(); } } } - setState(420); + setState(439); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,37,_ctx); + _alt = 
getInterpreter().adaptivePredict(_input,40,_ctx); } } } @@ -3472,19 +3583,19 @@ public T accept(ParseTreeVisitor visitor) { public final OrderExpressionContext orderExpression() throws RecognitionException { OrderExpressionContext _localctx = new OrderExpressionContext(_ctx, getState()); - enterRule(_localctx, 64, RULE_orderExpression); + enterRule(_localctx, 66, RULE_orderExpression); int _la; try { enterOuterAlt(_localctx, 1); { - setState(421); + setState(440); booleanExpression(0); - setState(423); + setState(442); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,38,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,41,_ctx) ) { case 1: { - setState(422); + setState(441); ((OrderExpressionContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -3498,14 +3609,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio } break; } - setState(427); + setState(446); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,39,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,42,_ctx) ) { case 1: { - setState(425); + setState(444); match(NULLS); - setState(426); + setState(445); ((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -3567,32 +3678,32 @@ public T accept(ParseTreeVisitor visitor) { public final KeepCommandContext keepCommand() throws RecognitionException { KeepCommandContext _localctx = new KeepCommandContext(_ctx, getState()); - enterRule(_localctx, 66, RULE_keepCommand); + enterRule(_localctx, 68, RULE_keepCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(429); + setState(448); match(KEEP); - setState(430); + setState(449); qualifiedNamePattern(); - setState(435); + setState(454); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,40,_ctx); + _alt = getInterpreter().adaptivePredict(_input,43,_ctx); while ( 
_alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(431); + setState(450); match(COMMA); - setState(432); + setState(451); qualifiedNamePattern(); } } } - setState(437); + setState(456); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,40,_ctx); + _alt = getInterpreter().adaptivePredict(_input,43,_ctx); } } } @@ -3642,32 +3753,32 @@ public T accept(ParseTreeVisitor visitor) { public final DropCommandContext dropCommand() throws RecognitionException { DropCommandContext _localctx = new DropCommandContext(_ctx, getState()); - enterRule(_localctx, 68, RULE_dropCommand); + enterRule(_localctx, 70, RULE_dropCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(438); + setState(457); match(DROP); - setState(439); + setState(458); qualifiedNamePattern(); - setState(444); + setState(463); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,41,_ctx); + _alt = getInterpreter().adaptivePredict(_input,44,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(440); + setState(459); match(COMMA); - setState(441); + setState(460); qualifiedNamePattern(); } } } - setState(446); + setState(465); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,41,_ctx); + _alt = getInterpreter().adaptivePredict(_input,44,_ctx); } } } @@ -3717,32 +3828,32 @@ public T accept(ParseTreeVisitor visitor) { public final RenameCommandContext renameCommand() throws RecognitionException { RenameCommandContext _localctx = new RenameCommandContext(_ctx, getState()); - enterRule(_localctx, 70, RULE_renameCommand); + enterRule(_localctx, 72, RULE_renameCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(447); + setState(466); match(RENAME); - setState(448); + setState(467); renameClause(); - setState(453); + setState(472); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,42,_ctx); + 
_alt = getInterpreter().adaptivePredict(_input,45,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(449); + setState(468); match(COMMA); - setState(450); + setState(469); renameClause(); } } } - setState(455); + setState(474); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,42,_ctx); + _alt = getInterpreter().adaptivePredict(_input,45,_ctx); } } } @@ -3790,15 +3901,15 @@ public T accept(ParseTreeVisitor visitor) { public final RenameClauseContext renameClause() throws RecognitionException { RenameClauseContext _localctx = new RenameClauseContext(_ctx, getState()); - enterRule(_localctx, 72, RULE_renameClause); + enterRule(_localctx, 74, RULE_renameClause); try { enterOuterAlt(_localctx, 1); { - setState(456); + setState(475); ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); - setState(457); + setState(476); match(AS); - setState(458); + setState(477); ((RenameClauseContext)_localctx).newName = qualifiedNamePattern(); } } @@ -3847,22 +3958,22 @@ public T accept(ParseTreeVisitor visitor) { public final DissectCommandContext dissectCommand() throws RecognitionException { DissectCommandContext _localctx = new DissectCommandContext(_ctx, getState()); - enterRule(_localctx, 74, RULE_dissectCommand); + enterRule(_localctx, 76, RULE_dissectCommand); try { enterOuterAlt(_localctx, 1); { - setState(460); + setState(479); match(DISSECT); - setState(461); + setState(480); primaryExpression(0); - setState(462); + setState(481); string(); - setState(464); + setState(483); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,43,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,46,_ctx) ) { case 1: { - setState(463); + setState(482); commandOptions(); } break; @@ -3911,15 +4022,15 @@ public T accept(ParseTreeVisitor visitor) { public final GrokCommandContext grokCommand() throws RecognitionException { GrokCommandContext _localctx = new 
GrokCommandContext(_ctx, getState()); - enterRule(_localctx, 76, RULE_grokCommand); + enterRule(_localctx, 78, RULE_grokCommand); try { enterOuterAlt(_localctx, 1); { - setState(466); + setState(485); match(GROK); - setState(467); + setState(486); primaryExpression(0); - setState(468); + setState(487); string(); } } @@ -3962,13 +4073,13 @@ public T accept(ParseTreeVisitor visitor) { public final MvExpandCommandContext mvExpandCommand() throws RecognitionException { MvExpandCommandContext _localctx = new MvExpandCommandContext(_ctx, getState()); - enterRule(_localctx, 78, RULE_mvExpandCommand); + enterRule(_localctx, 80, RULE_mvExpandCommand); try { enterOuterAlt(_localctx, 1); { - setState(470); + setState(489); match(MV_EXPAND); - setState(471); + setState(490); qualifiedName(); } } @@ -4017,30 +4128,30 @@ public T accept(ParseTreeVisitor visitor) { public final CommandOptionsContext commandOptions() throws RecognitionException { CommandOptionsContext _localctx = new CommandOptionsContext(_ctx, getState()); - enterRule(_localctx, 80, RULE_commandOptions); + enterRule(_localctx, 82, RULE_commandOptions); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(473); + setState(492); commandOption(); - setState(478); + setState(497); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,44,_ctx); + _alt = getInterpreter().adaptivePredict(_input,47,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(474); + setState(493); match(COMMA); - setState(475); + setState(494); commandOption(); } } } - setState(480); + setState(499); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,44,_ctx); + _alt = getInterpreter().adaptivePredict(_input,47,_ctx); } } } @@ -4086,15 +4197,15 @@ public T accept(ParseTreeVisitor visitor) { public final CommandOptionContext commandOption() throws RecognitionException { CommandOptionContext _localctx = new CommandOptionContext(_ctx, 
getState()); - enterRule(_localctx, 82, RULE_commandOption); + enterRule(_localctx, 84, RULE_commandOption); try { enterOuterAlt(_localctx, 1); { - setState(481); + setState(500); identifier(); - setState(482); + setState(501); match(ASSIGN); - setState(483); + setState(502); constant(); } } @@ -4135,12 +4246,12 @@ public T accept(ParseTreeVisitor visitor) { public final BooleanValueContext booleanValue() throws RecognitionException { BooleanValueContext _localctx = new BooleanValueContext(_ctx, getState()); - enterRule(_localctx, 84, RULE_booleanValue); + enterRule(_localctx, 86, RULE_booleanValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(485); + setState(504); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -4193,22 +4304,22 @@ public T accept(ParseTreeVisitor visitor) { public final NumericValueContext numericValue() throws RecognitionException { NumericValueContext _localctx = new NumericValueContext(_ctx, getState()); - enterRule(_localctx, 86, RULE_numericValue); + enterRule(_localctx, 88, RULE_numericValue); try { - setState(489); + setState(508); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,48,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(487); + setState(506); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(488); + setState(507); integerValue(); } break; @@ -4252,17 +4363,17 @@ public T accept(ParseTreeVisitor visitor) { public final DecimalValueContext decimalValue() throws RecognitionException { DecimalValueContext _localctx = new DecimalValueContext(_ctx, getState()); - enterRule(_localctx, 88, RULE_decimalValue); + enterRule(_localctx, 90, RULE_decimalValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(492); + setState(511); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(491); + setState(510); _la = 
_input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4275,7 +4386,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException { } } - setState(494); + setState(513); match(DECIMAL_LITERAL); } } @@ -4317,17 +4428,17 @@ public T accept(ParseTreeVisitor visitor) { public final IntegerValueContext integerValue() throws RecognitionException { IntegerValueContext _localctx = new IntegerValueContext(_ctx, getState()); - enterRule(_localctx, 90, RULE_integerValue); + enterRule(_localctx, 92, RULE_integerValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(497); + setState(516); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(496); + setState(515); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4340,7 +4451,7 @@ public final IntegerValueContext integerValue() throws RecognitionException { } } - setState(499); + setState(518); match(INTEGER_LITERAL); } } @@ -4380,11 +4491,11 @@ public T accept(ParseTreeVisitor visitor) { public final StringContext string() throws RecognitionException { StringContext _localctx = new StringContext(_ctx, getState()); - enterRule(_localctx, 92, RULE_string); + enterRule(_localctx, 94, RULE_string); try { enterOuterAlt(_localctx, 1); { - setState(501); + setState(520); match(QUOTED_STRING); } } @@ -4429,14 +4540,14 @@ public T accept(ParseTreeVisitor visitor) { public final ComparisonOperatorContext comparisonOperator() throws RecognitionException { ComparisonOperatorContext _localctx = new ComparisonOperatorContext(_ctx, getState()); - enterRule(_localctx, 94, RULE_comparisonOperator); + enterRule(_localctx, 96, RULE_comparisonOperator); int _la; try { enterOuterAlt(_localctx, 1); { - setState(503); + setState(522); _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 1125899906842624000L) != 0)) ) { + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 4503599627370496000L) != 0)) ) 
{ _errHandler.recoverInline(this); } else { @@ -4485,13 +4596,13 @@ public T accept(ParseTreeVisitor visitor) { public final ExplainCommandContext explainCommand() throws RecognitionException { ExplainCommandContext _localctx = new ExplainCommandContext(_ctx, getState()); - enterRule(_localctx, 96, RULE_explainCommand); + enterRule(_localctx, 98, RULE_explainCommand); try { enterOuterAlt(_localctx, 1); { - setState(505); + setState(524); match(EXPLAIN); - setState(506); + setState(525); subqueryExpression(); } } @@ -4535,15 +4646,15 @@ public T accept(ParseTreeVisitor visitor) { public final SubqueryExpressionContext subqueryExpression() throws RecognitionException { SubqueryExpressionContext _localctx = new SubqueryExpressionContext(_ctx, getState()); - enterRule(_localctx, 98, RULE_subqueryExpression); + enterRule(_localctx, 100, RULE_subqueryExpression); try { enterOuterAlt(_localctx, 1); { - setState(508); + setState(527); match(OPENING_BRACKET); - setState(509); + setState(528); query(0); - setState(510); + setState(529); match(CLOSING_BRACKET); } } @@ -4595,14 +4706,14 @@ public T accept(ParseTreeVisitor visitor) { public final ShowCommandContext showCommand() throws RecognitionException { ShowCommandContext _localctx = new ShowCommandContext(_ctx, getState()); - enterRule(_localctx, 100, RULE_showCommand); + enterRule(_localctx, 102, RULE_showCommand); try { _localctx = new ShowInfoContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(512); + setState(531); match(SHOW); - setState(513); + setState(532); match(INFO); } } @@ -4654,14 +4765,14 @@ public T accept(ParseTreeVisitor visitor) { public final MetaCommandContext metaCommand() throws RecognitionException { MetaCommandContext _localctx = new MetaCommandContext(_ctx, getState()); - enterRule(_localctx, 102, RULE_metaCommand); + enterRule(_localctx, 104, RULE_metaCommand); try { _localctx = new MetaFunctionsContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(515); + setState(534); 
match(META); - setState(516); + setState(535); match(FUNCTIONS); } } @@ -4719,53 +4830,53 @@ public T accept(ParseTreeVisitor visitor) { public final EnrichCommandContext enrichCommand() throws RecognitionException { EnrichCommandContext _localctx = new EnrichCommandContext(_ctx, getState()); - enterRule(_localctx, 104, RULE_enrichCommand); + enterRule(_localctx, 106, RULE_enrichCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(518); + setState(537); match(ENRICH); - setState(519); + setState(538); ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); - setState(522); + setState(541); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,48,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,51,_ctx) ) { case 1: { - setState(520); + setState(539); match(ON); - setState(521); + setState(540); ((EnrichCommandContext)_localctx).matchField = qualifiedNamePattern(); } break; } - setState(533); + setState(552); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,50,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,53,_ctx) ) { case 1: { - setState(524); + setState(543); match(WITH); - setState(525); + setState(544); enrichWithClause(); - setState(530); + setState(549); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,49,_ctx); + _alt = getInterpreter().adaptivePredict(_input,52,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(526); + setState(545); match(COMMA); - setState(527); + setState(546); enrichWithClause(); } } } - setState(532); + setState(551); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,49,_ctx); + _alt = getInterpreter().adaptivePredict(_input,52,_ctx); } } break; @@ -4816,23 +4927,23 @@ public T accept(ParseTreeVisitor visitor) { public final EnrichWithClauseContext enrichWithClause() throws RecognitionException { EnrichWithClauseContext 
_localctx = new EnrichWithClauseContext(_ctx, getState()); - enterRule(_localctx, 106, RULE_enrichWithClause); + enterRule(_localctx, 108, RULE_enrichWithClause); try { enterOuterAlt(_localctx, 1); { - setState(538); + setState(557); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,51,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,54,_ctx) ) { case 1: { - setState(535); + setState(554); ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); - setState(536); + setState(555); match(ASSIGN); } break; } - setState(540); + setState(559); ((EnrichWithClauseContext)_localctx).enrichField = qualifiedNamePattern(); } } @@ -4894,7 +5005,7 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in } public static final String _serializedATN = - "\u0004\u0001n\u021f\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ + "\u0004\u0001u\u0232\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ @@ -4908,337 +5019,350 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in "#\u0007#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002"+ "(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002"+ "-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u0002"+ - "2\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u0001\u0000\u0001\u0000"+ - "\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0005\u0001v\b\u0001\n\u0001\f\u0001y\t\u0001\u0001\u0002"+ - "\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002\u0080\b\u0002"+ - "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ - 
"\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ - "\u0001\u0003\u0003\u0003\u008f\b\u0003\u0001\u0004\u0001\u0004\u0001\u0004"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0003\u0005\u009b\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0005\u0005\u00a2\b\u0005\n\u0005\f\u0005\u00a5"+ - "\t\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003"+ - "\u0005\u00ac\b\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00b0\b\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0005\u0005\u00b8\b\u0005\n\u0005\f\u0005\u00bb\t\u0005\u0001\u0006\u0001"+ - "\u0006\u0003\u0006\u00bf\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ - "\u0006\u0001\u0006\u0003\u0006\u00c6\b\u0006\u0001\u0006\u0001\u0006\u0001"+ - "\u0006\u0003\u0006\u00cb\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ - "\u0007\u0001\u0007\u0003\u0007\u00d2\b\u0007\u0001\b\u0001\b\u0001\b\u0001"+ - "\b\u0003\b\u00d8\b\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0005"+ - "\b\u00e0\b\b\n\b\f\b\u00e3\t\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t"+ - "\u0001\t\u0001\t\u0001\t\u0003\t\u00ed\b\t\u0001\t\u0001\t\u0001\t\u0005"+ - "\t\u00f2\b\t\n\t\f\t\u00f5\t\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n"+ - "\u0001\n\u0005\n\u00fd\b\n\n\n\f\n\u0100\t\n\u0003\n\u0102\b\n\u0001\n"+ - "\u0001\n\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\r\u0001"+ - "\r\u0001\r\u0005\r\u010e\b\r\n\r\f\r\u0111\t\r\u0001\u000e\u0001\u000e"+ - "\u0001\u000e\u0001\u000e\u0001\u000e\u0003\u000e\u0118\b\u000e\u0001\u000f"+ - "\u0001\u000f\u0001\u000f\u0001\u000f\u0005\u000f\u011e\b\u000f\n\u000f"+ - "\f\u000f\u0121\t\u000f\u0001\u000f\u0003\u000f\u0124\b\u000f\u0001\u000f"+ - "\u0003\u000f\u0127\b\u000f\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011"+ - "\u0001\u0011\u0001\u0011\u0005\u0011\u012f\b\u0011\n\u0011\f\u0011\u0132"+ - 
"\t\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001"+ - "\u0013\u0003\u0013\u013a\b\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001"+ - "\u0014\u0005\u0014\u0140\b\u0014\n\u0014\f\u0014\u0143\t\u0014\u0001\u0015"+ - "\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0016"+ - "\u0001\u0017\u0001\u0017\u0003\u0017\u014e\b\u0017\u0001\u0017\u0001\u0017"+ - "\u0003\u0017\u0152\b\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018"+ - "\u0003\u0018\u0158\b\u0018\u0001\u0019\u0001\u0019\u0001\u0019\u0005\u0019"+ - "\u015d\b\u0019\n\u0019\f\u0019\u0160\t\u0019\u0001\u001a\u0001\u001a\u0001"+ - "\u001a\u0005\u001a\u0165\b\u001a\n\u001a\f\u001a\u0168\t\u001a\u0001\u001b"+ - "\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001d\u0001\u001d\u0001\u001d"+ - "\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d"+ - "\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0005\u001d\u017b\b\u001d"+ - "\n\u001d\f\u001d\u017e\t\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001"+ - "\u001d\u0001\u001d\u0001\u001d\u0005\u001d\u0186\b\u001d\n\u001d\f\u001d"+ - "\u0189\t\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d"+ - "\u0001\u001d\u0005\u001d\u0191\b\u001d\n\u001d\f\u001d\u0194\t\u001d\u0001"+ - "\u001d\u0001\u001d\u0003\u001d\u0198\b\u001d\u0001\u001e\u0001\u001e\u0001"+ - "\u001e\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u01a1"+ - "\b\u001f\n\u001f\f\u001f\u01a4\t\u001f\u0001 \u0001 \u0003 \u01a8\b \u0001"+ - " \u0001 \u0003 \u01ac\b \u0001!\u0001!\u0001!\u0001!\u0005!\u01b2\b!\n"+ - "!\f!\u01b5\t!\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01bb\b\"\n\"\f"+ - "\"\u01be\t\"\u0001#\u0001#\u0001#\u0001#\u0005#\u01c4\b#\n#\f#\u01c7\t"+ - "#\u0001$\u0001$\u0001$\u0001$\u0001%\u0001%\u0001%\u0001%\u0003%\u01d1"+ - "\b%\u0001&\u0001&\u0001&\u0001&\u0001\'\u0001\'\u0001\'\u0001(\u0001("+ - "\u0001(\u0005(\u01dd\b(\n(\f(\u01e0\t(\u0001)\u0001)\u0001)\u0001)\u0001"+ - 
"*\u0001*\u0001+\u0001+\u0003+\u01ea\b+\u0001,\u0003,\u01ed\b,\u0001,\u0001"+ - ",\u0001-\u0003-\u01f2\b-\u0001-\u0001-\u0001.\u0001.\u0001/\u0001/\u0001"+ - "0\u00010\u00010\u00011\u00011\u00011\u00011\u00012\u00012\u00012\u0001"+ - "3\u00013\u00013\u00014\u00014\u00014\u00014\u00034\u020b\b4\u00014\u0001"+ - "4\u00014\u00014\u00054\u0211\b4\n4\f4\u0214\t4\u00034\u0216\b4\u00015"+ - "\u00015\u00015\u00035\u021b\b5\u00015\u00015\u00015\u0000\u0004\u0002"+ - "\n\u0010\u00126\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014"+ - "\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\^`bdfh"+ - "j\u0000\b\u0001\u0000<=\u0001\u0000>@\u0002\u0000DDJJ\u0001\u0000CD\u0002"+ - "\u0000 $$\u0001\u0000\'(\u0002\u0000&&44\u0002\u0000557;\u0238\u0000"+ - "l\u0001\u0000\u0000\u0000\u0002o\u0001\u0000\u0000\u0000\u0004\u007f\u0001"+ - "\u0000\u0000\u0000\u0006\u008e\u0001\u0000\u0000\u0000\b\u0090\u0001\u0000"+ - "\u0000\u0000\n\u00af\u0001\u0000\u0000\u0000\f\u00ca\u0001\u0000\u0000"+ - "\u0000\u000e\u00d1\u0001\u0000\u0000\u0000\u0010\u00d7\u0001\u0000\u0000"+ - "\u0000\u0012\u00ec\u0001\u0000\u0000\u0000\u0014\u00f6\u0001\u0000\u0000"+ - "\u0000\u0016\u0105\u0001\u0000\u0000\u0000\u0018\u0107\u0001\u0000\u0000"+ - "\u0000\u001a\u010a\u0001\u0000\u0000\u0000\u001c\u0117\u0001\u0000\u0000"+ - "\u0000\u001e\u0119\u0001\u0000\u0000\u0000 \u0128\u0001\u0000\u0000\u0000"+ - "\"\u012a\u0001\u0000\u0000\u0000$\u0133\u0001\u0000\u0000\u0000&\u0139"+ - "\u0001\u0000\u0000\u0000(\u013b\u0001\u0000\u0000\u0000*\u0144\u0001\u0000"+ - "\u0000\u0000,\u0148\u0001\u0000\u0000\u0000.\u014b\u0001\u0000\u0000\u0000"+ - "0\u0153\u0001\u0000\u0000\u00002\u0159\u0001\u0000\u0000\u00004\u0161"+ - "\u0001\u0000\u0000\u00006\u0169\u0001\u0000\u0000\u00008\u016b\u0001\u0000"+ - "\u0000\u0000:\u0197\u0001\u0000\u0000\u0000<\u0199\u0001\u0000\u0000\u0000"+ - ">\u019c\u0001\u0000\u0000\u0000@\u01a5\u0001\u0000\u0000\u0000B\u01ad"+ - 
"\u0001\u0000\u0000\u0000D\u01b6\u0001\u0000\u0000\u0000F\u01bf\u0001\u0000"+ - "\u0000\u0000H\u01c8\u0001\u0000\u0000\u0000J\u01cc\u0001\u0000\u0000\u0000"+ - "L\u01d2\u0001\u0000\u0000\u0000N\u01d6\u0001\u0000\u0000\u0000P\u01d9"+ - "\u0001\u0000\u0000\u0000R\u01e1\u0001\u0000\u0000\u0000T\u01e5\u0001\u0000"+ - "\u0000\u0000V\u01e9\u0001\u0000\u0000\u0000X\u01ec\u0001\u0000\u0000\u0000"+ - "Z\u01f1\u0001\u0000\u0000\u0000\\\u01f5\u0001\u0000\u0000\u0000^\u01f7"+ - "\u0001\u0000\u0000\u0000`\u01f9\u0001\u0000\u0000\u0000b\u01fc\u0001\u0000"+ - "\u0000\u0000d\u0200\u0001\u0000\u0000\u0000f\u0203\u0001\u0000\u0000\u0000"+ - "h\u0206\u0001\u0000\u0000\u0000j\u021a\u0001\u0000\u0000\u0000lm\u0003"+ - "\u0002\u0001\u0000mn\u0005\u0000\u0000\u0001n\u0001\u0001\u0000\u0000"+ - "\u0000op\u0006\u0001\uffff\uffff\u0000pq\u0003\u0004\u0002\u0000qw\u0001"+ - "\u0000\u0000\u0000rs\n\u0001\u0000\u0000st\u0005\u001a\u0000\u0000tv\u0003"+ - "\u0006\u0003\u0000ur\u0001\u0000\u0000\u0000vy\u0001\u0000\u0000\u0000"+ - "wu\u0001\u0000\u0000\u0000wx\u0001\u0000\u0000\u0000x\u0003\u0001\u0000"+ - "\u0000\u0000yw\u0001\u0000\u0000\u0000z\u0080\u0003`0\u0000{\u0080\u0003"+ - "\u001e\u000f\u0000|\u0080\u0003\u0018\f\u0000}\u0080\u0003d2\u0000~\u0080"+ - "\u0003f3\u0000\u007fz\u0001\u0000\u0000\u0000\u007f{\u0001\u0000\u0000"+ - "\u0000\u007f|\u0001\u0000\u0000\u0000\u007f}\u0001\u0000\u0000\u0000\u007f"+ - "~\u0001\u0000\u0000\u0000\u0080\u0005\u0001\u0000\u0000\u0000\u0081\u008f"+ - "\u0003,\u0016\u0000\u0082\u008f\u00030\u0018\u0000\u0083\u008f\u0003<"+ - "\u001e\u0000\u0084\u008f\u0003B!\u0000\u0085\u008f\u0003>\u001f\u0000"+ - "\u0086\u008f\u0003.\u0017\u0000\u0087\u008f\u0003\b\u0004\u0000\u0088"+ - "\u008f\u0003D\"\u0000\u0089\u008f\u0003F#\u0000\u008a\u008f\u0003J%\u0000"+ - "\u008b\u008f\u0003L&\u0000\u008c\u008f\u0003h4\u0000\u008d\u008f\u0003"+ - "N\'\u0000\u008e\u0081\u0001\u0000\u0000\u0000\u008e\u0082\u0001\u0000"+ - 
"\u0000\u0000\u008e\u0083\u0001\u0000\u0000\u0000\u008e\u0084\u0001\u0000"+ - "\u0000\u0000\u008e\u0085\u0001\u0000\u0000\u0000\u008e\u0086\u0001\u0000"+ - "\u0000\u0000\u008e\u0087\u0001\u0000\u0000\u0000\u008e\u0088\u0001\u0000"+ - "\u0000\u0000\u008e\u0089\u0001\u0000\u0000\u0000\u008e\u008a\u0001\u0000"+ - "\u0000\u0000\u008e\u008b\u0001\u0000\u0000\u0000\u008e\u008c\u0001\u0000"+ - "\u0000\u0000\u008e\u008d\u0001\u0000\u0000\u0000\u008f\u0007\u0001\u0000"+ - "\u0000\u0000\u0090\u0091\u0005\u0012\u0000\u0000\u0091\u0092\u0003\n\u0005"+ - "\u0000\u0092\t\u0001\u0000\u0000\u0000\u0093\u0094\u0006\u0005\uffff\uffff"+ - "\u0000\u0094\u0095\u0005-\u0000\u0000\u0095\u00b0\u0003\n\u0005\u0007"+ - "\u0096\u00b0\u0003\u000e\u0007\u0000\u0097\u00b0\u0003\f\u0006\u0000\u0098"+ - "\u009a\u0003\u000e\u0007\u0000\u0099\u009b\u0005-\u0000\u0000\u009a\u0099"+ - "\u0001\u0000\u0000\u0000\u009a\u009b\u0001\u0000\u0000\u0000\u009b\u009c"+ - "\u0001\u0000\u0000\u0000\u009c\u009d\u0005*\u0000\u0000\u009d\u009e\u0005"+ - ")\u0000\u0000\u009e\u00a3\u0003\u000e\u0007\u0000\u009f\u00a0\u0005#\u0000"+ - "\u0000\u00a0\u00a2\u0003\u000e\u0007\u0000\u00a1\u009f\u0001\u0000\u0000"+ - "\u0000\u00a2\u00a5\u0001\u0000\u0000\u0000\u00a3\u00a1\u0001\u0000\u0000"+ - "\u0000\u00a3\u00a4\u0001\u0000\u0000\u0000\u00a4\u00a6\u0001\u0000\u0000"+ - "\u0000\u00a5\u00a3\u0001\u0000\u0000\u0000\u00a6\u00a7\u00053\u0000\u0000"+ - "\u00a7\u00b0\u0001\u0000\u0000\u0000\u00a8\u00a9\u0003\u000e\u0007\u0000"+ - "\u00a9\u00ab\u0005+\u0000\u0000\u00aa\u00ac\u0005-\u0000\u0000\u00ab\u00aa"+ - "\u0001\u0000\u0000\u0000\u00ab\u00ac\u0001\u0000\u0000\u0000\u00ac\u00ad"+ - "\u0001\u0000\u0000\u0000\u00ad\u00ae\u0005.\u0000\u0000\u00ae\u00b0\u0001"+ - "\u0000\u0000\u0000\u00af\u0093\u0001\u0000\u0000\u0000\u00af\u0096\u0001"+ - "\u0000\u0000\u0000\u00af\u0097\u0001\u0000\u0000\u0000\u00af\u0098\u0001"+ - "\u0000\u0000\u0000\u00af\u00a8\u0001\u0000\u0000\u0000\u00b0\u00b9\u0001"+ - 
"\u0000\u0000\u0000\u00b1\u00b2\n\u0004\u0000\u0000\u00b2\u00b3\u0005\u001f"+ - "\u0000\u0000\u00b3\u00b8\u0003\n\u0005\u0005\u00b4\u00b5\n\u0003\u0000"+ - "\u0000\u00b5\u00b6\u00050\u0000\u0000\u00b6\u00b8\u0003\n\u0005\u0004"+ - "\u00b7\u00b1\u0001\u0000\u0000\u0000\u00b7\u00b4\u0001\u0000\u0000\u0000"+ - "\u00b8\u00bb\u0001\u0000\u0000\u0000\u00b9\u00b7\u0001\u0000\u0000\u0000"+ - "\u00b9\u00ba\u0001\u0000\u0000\u0000\u00ba\u000b\u0001\u0000\u0000\u0000"+ - "\u00bb\u00b9\u0001\u0000\u0000\u0000\u00bc\u00be\u0003\u000e\u0007\u0000"+ - "\u00bd\u00bf\u0005-\u0000\u0000\u00be\u00bd\u0001\u0000\u0000\u0000\u00be"+ - "\u00bf\u0001\u0000\u0000\u0000\u00bf\u00c0\u0001\u0000\u0000\u0000\u00c0"+ - "\u00c1\u0005,\u0000\u0000\u00c1\u00c2\u0003\\.\u0000\u00c2\u00cb\u0001"+ - "\u0000\u0000\u0000\u00c3\u00c5\u0003\u000e\u0007\u0000\u00c4\u00c6\u0005"+ - "-\u0000\u0000\u00c5\u00c4\u0001\u0000\u0000\u0000\u00c5\u00c6\u0001\u0000"+ - "\u0000\u0000\u00c6\u00c7\u0001\u0000\u0000\u0000\u00c7\u00c8\u00052\u0000"+ - "\u0000\u00c8\u00c9\u0003\\.\u0000\u00c9\u00cb\u0001\u0000\u0000\u0000"+ - "\u00ca\u00bc\u0001\u0000\u0000\u0000\u00ca\u00c3\u0001\u0000\u0000\u0000"+ - "\u00cb\r\u0001\u0000\u0000\u0000\u00cc\u00d2\u0003\u0010\b\u0000\u00cd"+ - "\u00ce\u0003\u0010\b\u0000\u00ce\u00cf\u0003^/\u0000\u00cf\u00d0\u0003"+ - "\u0010\b\u0000\u00d0\u00d2\u0001\u0000\u0000\u0000\u00d1\u00cc\u0001\u0000"+ - "\u0000\u0000\u00d1\u00cd\u0001\u0000\u0000\u0000\u00d2\u000f\u0001\u0000"+ - "\u0000\u0000\u00d3\u00d4\u0006\b\uffff\uffff\u0000\u00d4\u00d8\u0003\u0012"+ - "\t\u0000\u00d5\u00d6\u0007\u0000\u0000\u0000\u00d6\u00d8\u0003\u0010\b"+ - "\u0003\u00d7\u00d3\u0001\u0000\u0000\u0000\u00d7\u00d5\u0001\u0000\u0000"+ - "\u0000\u00d8\u00e1\u0001\u0000\u0000\u0000\u00d9\u00da\n\u0002\u0000\u0000"+ - "\u00da\u00db\u0007\u0001\u0000\u0000\u00db\u00e0\u0003\u0010\b\u0003\u00dc"+ - "\u00dd\n\u0001\u0000\u0000\u00dd\u00de\u0007\u0000\u0000\u0000\u00de\u00e0"+ - 
"\u0003\u0010\b\u0002\u00df\u00d9\u0001\u0000\u0000\u0000\u00df\u00dc\u0001"+ - "\u0000\u0000\u0000\u00e0\u00e3\u0001\u0000\u0000\u0000\u00e1\u00df\u0001"+ - "\u0000\u0000\u0000\u00e1\u00e2\u0001\u0000\u0000\u0000\u00e2\u0011\u0001"+ - "\u0000\u0000\u0000\u00e3\u00e1\u0001\u0000\u0000\u0000\u00e4\u00e5\u0006"+ - "\t\uffff\uffff\u0000\u00e5\u00ed\u0003:\u001d\u0000\u00e6\u00ed\u0003"+ - "2\u0019\u0000\u00e7\u00ed\u0003\u0014\n\u0000\u00e8\u00e9\u0005)\u0000"+ - "\u0000\u00e9\u00ea\u0003\n\u0005\u0000\u00ea\u00eb\u00053\u0000\u0000"+ - "\u00eb\u00ed\u0001\u0000\u0000\u0000\u00ec\u00e4\u0001\u0000\u0000\u0000"+ - "\u00ec\u00e6\u0001\u0000\u0000\u0000\u00ec\u00e7\u0001\u0000\u0000\u0000"+ - "\u00ec\u00e8\u0001\u0000\u0000\u0000\u00ed\u00f3\u0001\u0000\u0000\u0000"+ - "\u00ee\u00ef\n\u0001\u0000\u0000\u00ef\u00f0\u0005\"\u0000\u0000\u00f0"+ - "\u00f2\u0003\u0016\u000b\u0000\u00f1\u00ee\u0001\u0000\u0000\u0000\u00f2"+ - "\u00f5\u0001\u0000\u0000\u0000\u00f3\u00f1\u0001\u0000\u0000\u0000\u00f3"+ - "\u00f4\u0001\u0000\u0000\u0000\u00f4\u0013\u0001\u0000\u0000\u0000\u00f5"+ - "\u00f3\u0001\u0000\u0000\u0000\u00f6\u00f7\u00036\u001b\u0000\u00f7\u0101"+ - "\u0005)\u0000\u0000\u00f8\u0102\u0005>\u0000\u0000\u00f9\u00fe\u0003\n"+ - "\u0005\u0000\u00fa\u00fb\u0005#\u0000\u0000\u00fb\u00fd\u0003\n\u0005"+ - "\u0000\u00fc\u00fa\u0001\u0000\u0000\u0000\u00fd\u0100\u0001\u0000\u0000"+ - "\u0000\u00fe\u00fc\u0001\u0000\u0000\u0000\u00fe\u00ff\u0001\u0000\u0000"+ - "\u0000\u00ff\u0102\u0001\u0000\u0000\u0000\u0100\u00fe\u0001\u0000\u0000"+ - "\u0000\u0101\u00f8\u0001\u0000\u0000\u0000\u0101\u00f9\u0001\u0000\u0000"+ - "\u0000\u0101\u0102\u0001\u0000\u0000\u0000\u0102\u0103\u0001\u0000\u0000"+ - "\u0000\u0103\u0104\u00053\u0000\u0000\u0104\u0015\u0001\u0000\u0000\u0000"+ - "\u0105\u0106\u00036\u001b\u0000\u0106\u0017\u0001\u0000\u0000\u0000\u0107"+ - "\u0108\u0005\u000e\u0000\u0000\u0108\u0109\u0003\u001a\r\u0000\u0109\u0019"+ - 
"\u0001\u0000\u0000\u0000\u010a\u010f\u0003\u001c\u000e\u0000\u010b\u010c"+ - "\u0005#\u0000\u0000\u010c\u010e\u0003\u001c\u000e\u0000\u010d\u010b\u0001"+ - "\u0000\u0000\u0000\u010e\u0111\u0001\u0000\u0000\u0000\u010f\u010d\u0001"+ - "\u0000\u0000\u0000\u010f\u0110\u0001\u0000\u0000\u0000\u0110\u001b\u0001"+ - "\u0000\u0000\u0000\u0111\u010f\u0001\u0000\u0000\u0000\u0112\u0118\u0003"+ - "\n\u0005\u0000\u0113\u0114\u00032\u0019\u0000\u0114\u0115\u0005!\u0000"+ - "\u0000\u0115\u0116\u0003\n\u0005\u0000\u0116\u0118\u0001\u0000\u0000\u0000"+ - "\u0117\u0112\u0001\u0000\u0000\u0000\u0117\u0113\u0001\u0000\u0000\u0000"+ - "\u0118\u001d\u0001\u0000\u0000\u0000\u0119\u011a\u0005\u0006\u0000\u0000"+ - "\u011a\u011f\u0003 \u0010\u0000\u011b\u011c\u0005#\u0000\u0000\u011c\u011e"+ - "\u0003 \u0010\u0000\u011d\u011b\u0001\u0000\u0000\u0000\u011e\u0121\u0001"+ - "\u0000\u0000\u0000\u011f\u011d\u0001\u0000\u0000\u0000\u011f\u0120\u0001"+ - "\u0000\u0000\u0000\u0120\u0123\u0001\u0000\u0000\u0000\u0121\u011f\u0001"+ - "\u0000\u0000\u0000\u0122\u0124\u0003&\u0013\u0000\u0123\u0122\u0001\u0000"+ - "\u0000\u0000\u0123\u0124\u0001\u0000\u0000\u0000\u0124\u0126\u0001\u0000"+ - "\u0000\u0000\u0125\u0127\u0003\"\u0011\u0000\u0126\u0125\u0001\u0000\u0000"+ - "\u0000\u0126\u0127\u0001\u0000\u0000\u0000\u0127\u001f\u0001\u0000\u0000"+ - "\u0000\u0128\u0129\u0007\u0002\u0000\u0000\u0129!\u0001\u0000\u0000\u0000"+ - "\u012a\u012b\u0005H\u0000\u0000\u012b\u0130\u0003$\u0012\u0000\u012c\u012d"+ - "\u0005#\u0000\u0000\u012d\u012f\u0003$\u0012\u0000\u012e\u012c\u0001\u0000"+ - "\u0000\u0000\u012f\u0132\u0001\u0000\u0000\u0000\u0130\u012e\u0001\u0000"+ - "\u0000\u0000\u0130\u0131\u0001\u0000\u0000\u0000\u0131#\u0001\u0000\u0000"+ - "\u0000\u0132\u0130\u0001\u0000\u0000\u0000\u0133\u0134\u0003\\.\u0000"+ - "\u0134\u0135\u0005!\u0000\u0000\u0135\u0136\u0003\\.\u0000\u0136%\u0001"+ - "\u0000\u0000\u0000\u0137\u013a\u0003(\u0014\u0000\u0138\u013a\u0003*\u0015"+ - 
"\u0000\u0139\u0137\u0001\u0000\u0000\u0000\u0139\u0138\u0001\u0000\u0000"+ - "\u0000\u013a\'\u0001\u0000\u0000\u0000\u013b\u013c\u0005I\u0000\u0000"+ - "\u013c\u0141\u0003 \u0010\u0000\u013d\u013e\u0005#\u0000\u0000\u013e\u0140"+ - "\u0003 \u0010\u0000\u013f\u013d\u0001\u0000\u0000\u0000\u0140\u0143\u0001"+ - "\u0000\u0000\u0000\u0141\u013f\u0001\u0000\u0000\u0000\u0141\u0142\u0001"+ - "\u0000\u0000\u0000\u0142)\u0001\u0000\u0000\u0000\u0143\u0141\u0001\u0000"+ - "\u0000\u0000\u0144\u0145\u0005A\u0000\u0000\u0145\u0146\u0003(\u0014\u0000"+ - "\u0146\u0147\u0005B\u0000\u0000\u0147+\u0001\u0000\u0000\u0000\u0148\u0149"+ - "\u0005\u0004\u0000\u0000\u0149\u014a\u0003\u001a\r\u0000\u014a-\u0001"+ - "\u0000\u0000\u0000\u014b\u014d\u0005\u0011\u0000\u0000\u014c\u014e\u0003"+ - "\u001a\r\u0000\u014d\u014c\u0001\u0000\u0000\u0000\u014d\u014e\u0001\u0000"+ - "\u0000\u0000\u014e\u0151\u0001\u0000\u0000\u0000\u014f\u0150\u0005\u001e"+ - "\u0000\u0000\u0150\u0152\u0003\u001a\r\u0000\u0151\u014f\u0001\u0000\u0000"+ - "\u0000\u0151\u0152\u0001\u0000\u0000\u0000\u0152/\u0001\u0000\u0000\u0000"+ - "\u0153\u0154\u0005\b\u0000\u0000\u0154\u0157\u0003\u001a\r\u0000\u0155"+ - "\u0156\u0005\u001e\u0000\u0000\u0156\u0158\u0003\u001a\r\u0000\u0157\u0155"+ - "\u0001\u0000\u0000\u0000\u0157\u0158\u0001\u0000\u0000\u0000\u01581\u0001"+ - "\u0000\u0000\u0000\u0159\u015e\u00036\u001b\u0000\u015a\u015b\u0005%\u0000"+ - "\u0000\u015b\u015d\u00036\u001b\u0000\u015c\u015a\u0001\u0000\u0000\u0000"+ - "\u015d\u0160\u0001\u0000\u0000\u0000\u015e\u015c\u0001\u0000\u0000\u0000"+ - "\u015e\u015f\u0001\u0000\u0000\u0000\u015f3\u0001\u0000\u0000\u0000\u0160"+ - "\u015e\u0001\u0000\u0000\u0000\u0161\u0166\u00038\u001c\u0000\u0162\u0163"+ - "\u0005%\u0000\u0000\u0163\u0165\u00038\u001c\u0000\u0164\u0162\u0001\u0000"+ - "\u0000\u0000\u0165\u0168\u0001\u0000\u0000\u0000\u0166\u0164\u0001\u0000"+ - "\u0000\u0000\u0166\u0167\u0001\u0000\u0000\u0000\u01675\u0001\u0000\u0000"+ - 
"\u0000\u0168\u0166\u0001\u0000\u0000\u0000\u0169\u016a\u0007\u0003\u0000"+ - "\u0000\u016a7\u0001\u0000\u0000\u0000\u016b\u016c\u0005N\u0000\u0000\u016c"+ - "9\u0001\u0000\u0000\u0000\u016d\u0198\u0005.\u0000\u0000\u016e\u016f\u0003"+ - "Z-\u0000\u016f\u0170\u0005C\u0000\u0000\u0170\u0198\u0001\u0000\u0000"+ - "\u0000\u0171\u0198\u0003X,\u0000\u0172\u0198\u0003Z-\u0000\u0173\u0198"+ - "\u0003T*\u0000\u0174\u0198\u00051\u0000\u0000\u0175\u0198\u0003\\.\u0000"+ - "\u0176\u0177\u0005A\u0000\u0000\u0177\u017c\u0003V+\u0000\u0178\u0179"+ - "\u0005#\u0000\u0000\u0179\u017b\u0003V+\u0000\u017a\u0178\u0001\u0000"+ - "\u0000\u0000\u017b\u017e\u0001\u0000\u0000\u0000\u017c\u017a\u0001\u0000"+ - "\u0000\u0000\u017c\u017d\u0001\u0000\u0000\u0000\u017d\u017f\u0001\u0000"+ - "\u0000\u0000\u017e\u017c\u0001\u0000\u0000\u0000\u017f\u0180\u0005B\u0000"+ - "\u0000\u0180\u0198\u0001\u0000\u0000\u0000\u0181\u0182\u0005A\u0000\u0000"+ - "\u0182\u0187\u0003T*\u0000\u0183\u0184\u0005#\u0000\u0000\u0184\u0186"+ - "\u0003T*\u0000\u0185\u0183\u0001\u0000\u0000\u0000\u0186\u0189\u0001\u0000"+ - "\u0000\u0000\u0187\u0185\u0001\u0000\u0000\u0000\u0187\u0188\u0001\u0000"+ - "\u0000\u0000\u0188\u018a\u0001\u0000\u0000\u0000\u0189\u0187\u0001\u0000"+ - "\u0000\u0000\u018a\u018b\u0005B\u0000\u0000\u018b\u0198\u0001\u0000\u0000"+ - "\u0000\u018c\u018d\u0005A\u0000\u0000\u018d\u0192\u0003\\.\u0000\u018e"+ - "\u018f\u0005#\u0000\u0000\u018f\u0191\u0003\\.\u0000\u0190\u018e\u0001"+ - "\u0000\u0000\u0000\u0191\u0194\u0001\u0000\u0000\u0000\u0192\u0190\u0001"+ - "\u0000\u0000\u0000\u0192\u0193\u0001\u0000\u0000\u0000\u0193\u0195\u0001"+ - "\u0000\u0000\u0000\u0194\u0192\u0001\u0000\u0000\u0000\u0195\u0196\u0005"+ - "B\u0000\u0000\u0196\u0198\u0001\u0000\u0000\u0000\u0197\u016d\u0001\u0000"+ - "\u0000\u0000\u0197\u016e\u0001\u0000\u0000\u0000\u0197\u0171\u0001\u0000"+ - "\u0000\u0000\u0197\u0172\u0001\u0000\u0000\u0000\u0197\u0173\u0001\u0000"+ - 
"\u0000\u0000\u0197\u0174\u0001\u0000\u0000\u0000\u0197\u0175\u0001\u0000"+ - "\u0000\u0000\u0197\u0176\u0001\u0000\u0000\u0000\u0197\u0181\u0001\u0000"+ - "\u0000\u0000\u0197\u018c\u0001\u0000\u0000\u0000\u0198;\u0001\u0000\u0000"+ - "\u0000\u0199\u019a\u0005\n\u0000\u0000\u019a\u019b\u0005\u001c\u0000\u0000"+ - "\u019b=\u0001\u0000\u0000\u0000\u019c\u019d\u0005\u0010\u0000\u0000\u019d"+ - "\u01a2\u0003@ \u0000\u019e\u019f\u0005#\u0000\u0000\u019f\u01a1\u0003"+ - "@ \u0000\u01a0\u019e\u0001\u0000\u0000\u0000\u01a1\u01a4\u0001\u0000\u0000"+ - "\u0000\u01a2\u01a0\u0001\u0000\u0000\u0000\u01a2\u01a3\u0001\u0000\u0000"+ - "\u0000\u01a3?\u0001\u0000\u0000\u0000\u01a4\u01a2\u0001\u0000\u0000\u0000"+ - "\u01a5\u01a7\u0003\n\u0005\u0000\u01a6\u01a8\u0007\u0004\u0000\u0000\u01a7"+ - "\u01a6\u0001\u0000\u0000\u0000\u01a7\u01a8\u0001\u0000\u0000\u0000\u01a8"+ - "\u01ab\u0001\u0000\u0000\u0000\u01a9\u01aa\u0005/\u0000\u0000\u01aa\u01ac"+ - "\u0007\u0005\u0000\u0000\u01ab\u01a9\u0001\u0000\u0000\u0000\u01ab\u01ac"+ - "\u0001\u0000\u0000\u0000\u01acA\u0001\u0000\u0000\u0000\u01ad\u01ae\u0005"+ - "\t\u0000\u0000\u01ae\u01b3\u00034\u001a\u0000\u01af\u01b0\u0005#\u0000"+ - "\u0000\u01b0\u01b2\u00034\u001a\u0000\u01b1\u01af\u0001\u0000\u0000\u0000"+ - "\u01b2\u01b5\u0001\u0000\u0000\u0000\u01b3\u01b1\u0001\u0000\u0000\u0000"+ - "\u01b3\u01b4\u0001\u0000\u0000\u0000\u01b4C\u0001\u0000\u0000\u0000\u01b5"+ - "\u01b3\u0001\u0000\u0000\u0000\u01b6\u01b7\u0005\u0002\u0000\u0000\u01b7"+ - "\u01bc\u00034\u001a\u0000\u01b8\u01b9\u0005#\u0000\u0000\u01b9\u01bb\u0003"+ - "4\u001a\u0000\u01ba\u01b8\u0001\u0000\u0000\u0000\u01bb\u01be\u0001\u0000"+ - "\u0000\u0000\u01bc\u01ba\u0001\u0000\u0000\u0000\u01bc\u01bd\u0001\u0000"+ - "\u0000\u0000\u01bdE\u0001\u0000\u0000\u0000\u01be\u01bc\u0001\u0000\u0000"+ - "\u0000\u01bf\u01c0\u0005\r\u0000\u0000\u01c0\u01c5\u0003H$\u0000\u01c1"+ - "\u01c2\u0005#\u0000\u0000\u01c2\u01c4\u0003H$\u0000\u01c3\u01c1\u0001"+ - 
"\u0000\u0000\u0000\u01c4\u01c7\u0001\u0000\u0000\u0000\u01c5\u01c3\u0001"+ - "\u0000\u0000\u0000\u01c5\u01c6\u0001\u0000\u0000\u0000\u01c6G\u0001\u0000"+ - "\u0000\u0000\u01c7\u01c5\u0001\u0000\u0000\u0000\u01c8\u01c9\u00034\u001a"+ - "\u0000\u01c9\u01ca\u0005R\u0000\u0000\u01ca\u01cb\u00034\u001a\u0000\u01cb"+ - "I\u0001\u0000\u0000\u0000\u01cc\u01cd\u0005\u0001\u0000\u0000\u01cd\u01ce"+ - "\u0003\u0012\t\u0000\u01ce\u01d0\u0003\\.\u0000\u01cf\u01d1\u0003P(\u0000"+ - "\u01d0\u01cf\u0001\u0000\u0000\u0000\u01d0\u01d1\u0001\u0000\u0000\u0000"+ - "\u01d1K\u0001\u0000\u0000\u0000\u01d2\u01d3\u0005\u0007\u0000\u0000\u01d3"+ - "\u01d4\u0003\u0012\t\u0000\u01d4\u01d5\u0003\\.\u0000\u01d5M\u0001\u0000"+ - "\u0000\u0000\u01d6\u01d7\u0005\f\u0000\u0000\u01d7\u01d8\u00032\u0019"+ - "\u0000\u01d8O\u0001\u0000\u0000\u0000\u01d9\u01de\u0003R)\u0000\u01da"+ - "\u01db\u0005#\u0000\u0000\u01db\u01dd\u0003R)\u0000\u01dc\u01da\u0001"+ - "\u0000\u0000\u0000\u01dd\u01e0\u0001\u0000\u0000\u0000\u01de\u01dc\u0001"+ - "\u0000\u0000\u0000\u01de\u01df\u0001\u0000\u0000\u0000\u01dfQ\u0001\u0000"+ - "\u0000\u0000\u01e0\u01de\u0001\u0000\u0000\u0000\u01e1\u01e2\u00036\u001b"+ - "\u0000\u01e2\u01e3\u0005!\u0000\u0000\u01e3\u01e4\u0003:\u001d\u0000\u01e4"+ - "S\u0001\u0000\u0000\u0000\u01e5\u01e6\u0007\u0006\u0000\u0000\u01e6U\u0001"+ - "\u0000\u0000\u0000\u01e7\u01ea\u0003X,\u0000\u01e8\u01ea\u0003Z-\u0000"+ - "\u01e9\u01e7\u0001\u0000\u0000\u0000\u01e9\u01e8\u0001\u0000\u0000\u0000"+ - "\u01eaW\u0001\u0000\u0000\u0000\u01eb\u01ed\u0007\u0000\u0000\u0000\u01ec"+ - "\u01eb\u0001\u0000\u0000\u0000\u01ec\u01ed\u0001\u0000\u0000\u0000\u01ed"+ - "\u01ee\u0001\u0000\u0000\u0000\u01ee\u01ef\u0005\u001d\u0000\u0000\u01ef"+ - "Y\u0001\u0000\u0000\u0000\u01f0\u01f2\u0007\u0000\u0000\u0000\u01f1\u01f0"+ - "\u0001\u0000\u0000\u0000\u01f1\u01f2\u0001\u0000\u0000\u0000\u01f2\u01f3"+ - "\u0001\u0000\u0000\u0000\u01f3\u01f4\u0005\u001c\u0000\u0000\u01f4[\u0001"+ - 
"\u0000\u0000\u0000\u01f5\u01f6\u0005\u001b\u0000\u0000\u01f6]\u0001\u0000"+ - "\u0000\u0000\u01f7\u01f8\u0007\u0007\u0000\u0000\u01f8_\u0001\u0000\u0000"+ - "\u0000\u01f9\u01fa\u0005\u0005\u0000\u0000\u01fa\u01fb\u0003b1\u0000\u01fb"+ - "a\u0001\u0000\u0000\u0000\u01fc\u01fd\u0005A\u0000\u0000\u01fd\u01fe\u0003"+ - "\u0002\u0001\u0000\u01fe\u01ff\u0005B\u0000\u0000\u01ffc\u0001\u0000\u0000"+ - "\u0000\u0200\u0201\u0005\u000f\u0000\u0000\u0201\u0202\u0005b\u0000\u0000"+ - "\u0202e\u0001\u0000\u0000\u0000\u0203\u0204\u0005\u000b\u0000\u0000\u0204"+ - "\u0205\u0005f\u0000\u0000\u0205g\u0001\u0000\u0000\u0000\u0206\u0207\u0005"+ - "\u0003\u0000\u0000\u0207\u020a\u0005X\u0000\u0000\u0208\u0209\u0005V\u0000"+ - "\u0000\u0209\u020b\u00034\u001a\u0000\u020a\u0208\u0001\u0000\u0000\u0000"+ - "\u020a\u020b\u0001\u0000\u0000\u0000\u020b\u0215\u0001\u0000\u0000\u0000"+ - "\u020c\u020d\u0005W\u0000\u0000\u020d\u0212\u0003j5\u0000\u020e\u020f"+ - "\u0005#\u0000\u0000\u020f\u0211\u0003j5\u0000\u0210\u020e\u0001\u0000"+ - "\u0000\u0000\u0211\u0214\u0001\u0000\u0000\u0000\u0212\u0210\u0001\u0000"+ - "\u0000\u0000\u0212\u0213\u0001\u0000\u0000\u0000\u0213\u0216\u0001\u0000"+ - "\u0000\u0000\u0214\u0212\u0001\u0000\u0000\u0000\u0215\u020c\u0001\u0000"+ - "\u0000\u0000\u0215\u0216\u0001\u0000\u0000\u0000\u0216i\u0001\u0000\u0000"+ - "\u0000\u0217\u0218\u00034\u001a\u0000\u0218\u0219\u0005!\u0000\u0000\u0219"+ - "\u021b\u0001\u0000\u0000\u0000\u021a\u0217\u0001\u0000\u0000\u0000\u021a"+ - "\u021b\u0001\u0000\u0000\u0000\u021b\u021c\u0001\u0000\u0000\u0000\u021c"+ - "\u021d\u00034\u001a\u0000\u021dk\u0001\u0000\u0000\u00004w\u007f\u008e"+ - "\u009a\u00a3\u00ab\u00af\u00b7\u00b9\u00be\u00c5\u00ca\u00d1\u00d7\u00df"+ - "\u00e1\u00ec\u00f3\u00fe\u0101\u010f\u0117\u011f\u0123\u0126\u0130\u0139"+ - "\u0141\u014d\u0151\u0157\u015e\u0166\u017c\u0187\u0192\u0197\u01a2\u01a7"+ - "\u01ab\u01b3\u01bc\u01c5\u01d0\u01de\u01e9\u01ec\u01f1\u020a\u0212\u0215"+ - "\u021a"; + 
"2\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u00076\u0001"+ + "\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0005\u0001x\b\u0001\n\u0001\f\u0001{\t"+ + "\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ + "\u0002\u0003\u0002\u0083\b\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0003\u0003\u0092\b\u0003\u0001"+ + "\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u009e\b\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u00a5"+ + "\b\u0005\n\u0005\f\u0005\u00a8\t\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0003\u0005\u00af\b\u0005\u0001\u0005\u0001\u0005"+ + "\u0003\u0005\u00b3\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0005\u0005\u00bb\b\u0005\n\u0005\f\u0005\u00be"+ + "\t\u0005\u0001\u0006\u0001\u0006\u0003\u0006\u00c2\b\u0006\u0001\u0006"+ + "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00c9\b\u0006"+ + "\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00ce\b\u0006\u0001\u0007"+ + "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0003\u0007\u00d5\b\u0007"+ + "\u0001\b\u0001\b\u0001\b\u0001\b\u0003\b\u00db\b\b\u0001\b\u0001\b\u0001"+ + "\b\u0001\b\u0001\b\u0001\b\u0005\b\u00e3\b\b\n\b\f\b\u00e6\t\b\u0001\t"+ + "\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0003\t\u00f0"+ + "\b\t\u0001\t\u0001\t\u0001\t\u0005\t\u00f5\b\t\n\t\f\t\u00f8\t\t\u0001"+ + "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0005\n\u0100\b\n\n\n\f\n\u0103"+ + "\t\n\u0003\n\u0105\b\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001\f"+ + "\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0005\r\u0111\b\r\n\r\f\r\u0114"+ + 
"\t\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0003"+ + "\u000e\u011b\b\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0005"+ + "\u000f\u0121\b\u000f\n\u000f\f\u000f\u0124\t\u000f\u0001\u000f\u0003\u000f"+ + "\u0127\b\u000f\u0001\u000f\u0003\u000f\u012a\b\u000f\u0001\u0010\u0001"+ + "\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0005\u0011\u0132"+ + "\b\u0011\n\u0011\f\u0011\u0135\t\u0011\u0001\u0012\u0001\u0012\u0001\u0012"+ + "\u0001\u0012\u0001\u0013\u0001\u0013\u0003\u0013\u013d\b\u0013\u0001\u0014"+ + "\u0001\u0014\u0001\u0014\u0001\u0014\u0005\u0014\u0143\b\u0014\n\u0014"+ + "\f\u0014\u0146\t\u0014\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015"+ + "\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0005\u0016\u0150\b\u0016"+ + "\n\u0016\f\u0016\u0153\t\u0016\u0001\u0016\u0003\u0016\u0156\b\u0016\u0001"+ + "\u0016\u0001\u0016\u0003\u0016\u015a\b\u0016\u0001\u0017\u0001\u0017\u0001"+ + "\u0017\u0001\u0018\u0001\u0018\u0003\u0018\u0161\b\u0018\u0001\u0018\u0001"+ + "\u0018\u0003\u0018\u0165\b\u0018\u0001\u0019\u0001\u0019\u0001\u0019\u0001"+ + "\u0019\u0003\u0019\u016b\b\u0019\u0001\u001a\u0001\u001a\u0001\u001a\u0005"+ + "\u001a\u0170\b\u001a\n\u001a\f\u001a\u0173\t\u001a\u0001\u001b\u0001\u001b"+ + "\u0001\u001b\u0005\u001b\u0178\b\u001b\n\u001b\f\u001b\u017b\t\u001b\u0001"+ + "\u001c\u0001\u001c\u0001\u001d\u0001\u001d\u0001\u001e\u0001\u001e\u0001"+ + "\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0001"+ + "\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0005\u001e\u018e"+ + "\b\u001e\n\u001e\f\u001e\u0191\t\u001e\u0001\u001e\u0001\u001e\u0001\u001e"+ + "\u0001\u001e\u0001\u001e\u0001\u001e\u0005\u001e\u0199\b\u001e\n\u001e"+ + "\f\u001e\u019c\t\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001e"+ + "\u0001\u001e\u0001\u001e\u0005\u001e\u01a4\b\u001e\n\u001e\f\u001e\u01a7"+ + "\t\u001e\u0001\u001e\u0001\u001e\u0003\u001e\u01ab\b\u001e\u0001\u001f"+ + "\u0001\u001f\u0001\u001f\u0001 
\u0001 \u0001 \u0001 \u0005 \u01b4\b \n"+ + " \f \u01b7\t \u0001!\u0001!\u0003!\u01bb\b!\u0001!\u0001!\u0003!\u01bf"+ + "\b!\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01c5\b\"\n\"\f\"\u01c8\t"+ + "\"\u0001#\u0001#\u0001#\u0001#\u0005#\u01ce\b#\n#\f#\u01d1\t#\u0001$\u0001"+ + "$\u0001$\u0001$\u0005$\u01d7\b$\n$\f$\u01da\t$\u0001%\u0001%\u0001%\u0001"+ + "%\u0001&\u0001&\u0001&\u0001&\u0003&\u01e4\b&\u0001\'\u0001\'\u0001\'"+ + "\u0001\'\u0001(\u0001(\u0001(\u0001)\u0001)\u0001)\u0005)\u01f0\b)\n)"+ + "\f)\u01f3\t)\u0001*\u0001*\u0001*\u0001*\u0001+\u0001+\u0001,\u0001,\u0003"+ + ",\u01fd\b,\u0001-\u0003-\u0200\b-\u0001-\u0001-\u0001.\u0003.\u0205\b"+ + ".\u0001.\u0001.\u0001/\u0001/\u00010\u00010\u00011\u00011\u00011\u0001"+ + "2\u00012\u00012\u00012\u00013\u00013\u00013\u00014\u00014\u00014\u0001"+ + "5\u00015\u00015\u00015\u00035\u021e\b5\u00015\u00015\u00015\u00015\u0005"+ + "5\u0224\b5\n5\f5\u0227\t5\u00035\u0229\b5\u00016\u00016\u00016\u00036"+ + "\u022e\b6\u00016\u00016\u00016\u0000\u0004\u0002\n\u0010\u00127\u0000"+ + "\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a\u001c"+ + "\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\^`bdfhjl\u0000\u0007\u0001\u0000"+ + ">?\u0001\u0000@B\u0001\u0000EF\u0002\u0000\"\"&&\u0001\u0000)*\u0002\u0000"+ + "((66\u0002\u0000779=\u024e\u0000n\u0001\u0000\u0000\u0000\u0002q\u0001"+ + "\u0000\u0000\u0000\u0004\u0082\u0001\u0000\u0000\u0000\u0006\u0091\u0001"+ + "\u0000\u0000\u0000\b\u0093\u0001\u0000\u0000\u0000\n\u00b2\u0001\u0000"+ + "\u0000\u0000\f\u00cd\u0001\u0000\u0000\u0000\u000e\u00d4\u0001\u0000\u0000"+ + "\u0000\u0010\u00da\u0001\u0000\u0000\u0000\u0012\u00ef\u0001\u0000\u0000"+ + "\u0000\u0014\u00f9\u0001\u0000\u0000\u0000\u0016\u0108\u0001\u0000\u0000"+ + "\u0000\u0018\u010a\u0001\u0000\u0000\u0000\u001a\u010d\u0001\u0000\u0000"+ + "\u0000\u001c\u011a\u0001\u0000\u0000\u0000\u001e\u011c\u0001\u0000\u0000"+ + "\u0000 \u012b\u0001\u0000\u0000\u0000\"\u012d\u0001\u0000\u0000\u0000"+ + 
"$\u0136\u0001\u0000\u0000\u0000&\u013c\u0001\u0000\u0000\u0000(\u013e"+ + "\u0001\u0000\u0000\u0000*\u0147\u0001\u0000\u0000\u0000,\u014b\u0001\u0000"+ + "\u0000\u0000.\u015b\u0001\u0000\u0000\u00000\u015e\u0001\u0000\u0000\u0000"+ + "2\u0166\u0001\u0000\u0000\u00004\u016c\u0001\u0000\u0000\u00006\u0174"+ + "\u0001\u0000\u0000\u00008\u017c\u0001\u0000\u0000\u0000:\u017e\u0001\u0000"+ + "\u0000\u0000<\u01aa\u0001\u0000\u0000\u0000>\u01ac\u0001\u0000\u0000\u0000"+ + "@\u01af\u0001\u0000\u0000\u0000B\u01b8\u0001\u0000\u0000\u0000D\u01c0"+ + "\u0001\u0000\u0000\u0000F\u01c9\u0001\u0000\u0000\u0000H\u01d2\u0001\u0000"+ + "\u0000\u0000J\u01db\u0001\u0000\u0000\u0000L\u01df\u0001\u0000\u0000\u0000"+ + "N\u01e5\u0001\u0000\u0000\u0000P\u01e9\u0001\u0000\u0000\u0000R\u01ec"+ + "\u0001\u0000\u0000\u0000T\u01f4\u0001\u0000\u0000\u0000V\u01f8\u0001\u0000"+ + "\u0000\u0000X\u01fc\u0001\u0000\u0000\u0000Z\u01ff\u0001\u0000\u0000\u0000"+ + "\\\u0204\u0001\u0000\u0000\u0000^\u0208\u0001\u0000\u0000\u0000`\u020a"+ + "\u0001\u0000\u0000\u0000b\u020c\u0001\u0000\u0000\u0000d\u020f\u0001\u0000"+ + "\u0000\u0000f\u0213\u0001\u0000\u0000\u0000h\u0216\u0001\u0000\u0000\u0000"+ + "j\u0219\u0001\u0000\u0000\u0000l\u022d\u0001\u0000\u0000\u0000no\u0003"+ + "\u0002\u0001\u0000op\u0005\u0000\u0000\u0001p\u0001\u0001\u0000\u0000"+ + "\u0000qr\u0006\u0001\uffff\uffff\u0000rs\u0003\u0004\u0002\u0000sy\u0001"+ + "\u0000\u0000\u0000tu\n\u0001\u0000\u0000uv\u0005\u001c\u0000\u0000vx\u0003"+ + "\u0006\u0003\u0000wt\u0001\u0000\u0000\u0000x{\u0001\u0000\u0000\u0000"+ + "yw\u0001\u0000\u0000\u0000yz\u0001\u0000\u0000\u0000z\u0003\u0001\u0000"+ + "\u0000\u0000{y\u0001\u0000\u0000\u0000|\u0083\u0003b1\u0000}\u0083\u0003"+ + "\u001e\u000f\u0000~\u0083\u0003\u0018\f\u0000\u007f\u0083\u0003,\u0016"+ + "\u0000\u0080\u0083\u0003f3\u0000\u0081\u0083\u0003h4\u0000\u0082|\u0001"+ + "\u0000\u0000\u0000\u0082}\u0001\u0000\u0000\u0000\u0082~\u0001\u0000\u0000"+ + 
"\u0000\u0082\u007f\u0001\u0000\u0000\u0000\u0082\u0080\u0001\u0000\u0000"+ + "\u0000\u0082\u0081\u0001\u0000\u0000\u0000\u0083\u0005\u0001\u0000\u0000"+ + "\u0000\u0084\u0092\u0003.\u0017\u0000\u0085\u0092\u00032\u0019\u0000\u0086"+ + "\u0092\u0003>\u001f\u0000\u0087\u0092\u0003D\"\u0000\u0088\u0092\u0003"+ + "@ \u0000\u0089\u0092\u00030\u0018\u0000\u008a\u0092\u0003\b\u0004\u0000"+ + "\u008b\u0092\u0003F#\u0000\u008c\u0092\u0003H$\u0000\u008d\u0092\u0003"+ + "L&\u0000\u008e\u0092\u0003N\'\u0000\u008f\u0092\u0003j5\u0000\u0090\u0092"+ + "\u0003P(\u0000\u0091\u0084\u0001\u0000\u0000\u0000\u0091\u0085\u0001\u0000"+ + "\u0000\u0000\u0091\u0086\u0001\u0000\u0000\u0000\u0091\u0087\u0001\u0000"+ + "\u0000\u0000\u0091\u0088\u0001\u0000\u0000\u0000\u0091\u0089\u0001\u0000"+ + "\u0000\u0000\u0091\u008a\u0001\u0000\u0000\u0000\u0091\u008b\u0001\u0000"+ + "\u0000\u0000\u0091\u008c\u0001\u0000\u0000\u0000\u0091\u008d\u0001\u0000"+ + "\u0000\u0000\u0091\u008e\u0001\u0000\u0000\u0000\u0091\u008f\u0001\u0000"+ + "\u0000\u0000\u0091\u0090\u0001\u0000\u0000\u0000\u0092\u0007\u0001\u0000"+ + "\u0000\u0000\u0093\u0094\u0005\u0013\u0000\u0000\u0094\u0095\u0003\n\u0005"+ + "\u0000\u0095\t\u0001\u0000\u0000\u0000\u0096\u0097\u0006\u0005\uffff\uffff"+ + "\u0000\u0097\u0098\u0005/\u0000\u0000\u0098\u00b3\u0003\n\u0005\u0007"+ + "\u0099\u00b3\u0003\u000e\u0007\u0000\u009a\u00b3\u0003\f\u0006\u0000\u009b"+ + "\u009d\u0003\u000e\u0007\u0000\u009c\u009e\u0005/\u0000\u0000\u009d\u009c"+ + "\u0001\u0000\u0000\u0000\u009d\u009e\u0001\u0000\u0000\u0000\u009e\u009f"+ + "\u0001\u0000\u0000\u0000\u009f\u00a0\u0005,\u0000\u0000\u00a0\u00a1\u0005"+ + "+\u0000\u0000\u00a1\u00a6\u0003\u000e\u0007\u0000\u00a2\u00a3\u0005%\u0000"+ + "\u0000\u00a3\u00a5\u0003\u000e\u0007\u0000\u00a4\u00a2\u0001\u0000\u0000"+ + "\u0000\u00a5\u00a8\u0001\u0000\u0000\u0000\u00a6\u00a4\u0001\u0000\u0000"+ + "\u0000\u00a6\u00a7\u0001\u0000\u0000\u0000\u00a7\u00a9\u0001\u0000\u0000"+ + 
"\u0000\u00a8\u00a6\u0001\u0000\u0000\u0000\u00a9\u00aa\u00055\u0000\u0000"+ + "\u00aa\u00b3\u0001\u0000\u0000\u0000\u00ab\u00ac\u0003\u000e\u0007\u0000"+ + "\u00ac\u00ae\u0005-\u0000\u0000\u00ad\u00af\u0005/\u0000\u0000\u00ae\u00ad"+ + "\u0001\u0000\u0000\u0000\u00ae\u00af\u0001\u0000\u0000\u0000\u00af\u00b0"+ + "\u0001\u0000\u0000\u0000\u00b0\u00b1\u00050\u0000\u0000\u00b1\u00b3\u0001"+ + "\u0000\u0000\u0000\u00b2\u0096\u0001\u0000\u0000\u0000\u00b2\u0099\u0001"+ + "\u0000\u0000\u0000\u00b2\u009a\u0001\u0000\u0000\u0000\u00b2\u009b\u0001"+ + "\u0000\u0000\u0000\u00b2\u00ab\u0001\u0000\u0000\u0000\u00b3\u00bc\u0001"+ + "\u0000\u0000\u0000\u00b4\u00b5\n\u0004\u0000\u0000\u00b5\u00b6\u0005!"+ + "\u0000\u0000\u00b6\u00bb\u0003\n\u0005\u0005\u00b7\u00b8\n\u0003\u0000"+ + "\u0000\u00b8\u00b9\u00052\u0000\u0000\u00b9\u00bb\u0003\n\u0005\u0004"+ + "\u00ba\u00b4\u0001\u0000\u0000\u0000\u00ba\u00b7\u0001\u0000\u0000\u0000"+ + "\u00bb\u00be\u0001\u0000\u0000\u0000\u00bc\u00ba\u0001\u0000\u0000\u0000"+ + "\u00bc\u00bd\u0001\u0000\u0000\u0000\u00bd\u000b\u0001\u0000\u0000\u0000"+ + "\u00be\u00bc\u0001\u0000\u0000\u0000\u00bf\u00c1\u0003\u000e\u0007\u0000"+ + "\u00c0\u00c2\u0005/\u0000\u0000\u00c1\u00c0\u0001\u0000\u0000\u0000\u00c1"+ + "\u00c2\u0001\u0000\u0000\u0000\u00c2\u00c3\u0001\u0000\u0000\u0000\u00c3"+ + "\u00c4\u0005.\u0000\u0000\u00c4\u00c5\u0003^/\u0000\u00c5\u00ce\u0001"+ + "\u0000\u0000\u0000\u00c6\u00c8\u0003\u000e\u0007\u0000\u00c7\u00c9\u0005"+ + "/\u0000\u0000\u00c8\u00c7\u0001\u0000\u0000\u0000\u00c8\u00c9\u0001\u0000"+ + "\u0000\u0000\u00c9\u00ca\u0001\u0000\u0000\u0000\u00ca\u00cb\u00054\u0000"+ + "\u0000\u00cb\u00cc\u0003^/\u0000\u00cc\u00ce\u0001\u0000\u0000\u0000\u00cd"+ + "\u00bf\u0001\u0000\u0000\u0000\u00cd\u00c6\u0001\u0000\u0000\u0000\u00ce"+ + "\r\u0001\u0000\u0000\u0000\u00cf\u00d5\u0003\u0010\b\u0000\u00d0\u00d1"+ + "\u0003\u0010\b\u0000\u00d1\u00d2\u0003`0\u0000\u00d2\u00d3\u0003\u0010"+ + 
"\b\u0000\u00d3\u00d5\u0001\u0000\u0000\u0000\u00d4\u00cf\u0001\u0000\u0000"+ + "\u0000\u00d4\u00d0\u0001\u0000\u0000\u0000\u00d5\u000f\u0001\u0000\u0000"+ + "\u0000\u00d6\u00d7\u0006\b\uffff\uffff\u0000\u00d7\u00db\u0003\u0012\t"+ + "\u0000\u00d8\u00d9\u0007\u0000\u0000\u0000\u00d9\u00db\u0003\u0010\b\u0003"+ + "\u00da\u00d6\u0001\u0000\u0000\u0000\u00da\u00d8\u0001\u0000\u0000\u0000"+ + "\u00db\u00e4\u0001\u0000\u0000\u0000\u00dc\u00dd\n\u0002\u0000\u0000\u00dd"+ + "\u00de\u0007\u0001\u0000\u0000\u00de\u00e3\u0003\u0010\b\u0003\u00df\u00e0"+ + "\n\u0001\u0000\u0000\u00e0\u00e1\u0007\u0000\u0000\u0000\u00e1\u00e3\u0003"+ + "\u0010\b\u0002\u00e2\u00dc\u0001\u0000\u0000\u0000\u00e2\u00df\u0001\u0000"+ + "\u0000\u0000\u00e3\u00e6\u0001\u0000\u0000\u0000\u00e4\u00e2\u0001\u0000"+ + "\u0000\u0000\u00e4\u00e5\u0001\u0000\u0000\u0000\u00e5\u0011\u0001\u0000"+ + "\u0000\u0000\u00e6\u00e4\u0001\u0000\u0000\u0000\u00e7\u00e8\u0006\t\uffff"+ + "\uffff\u0000\u00e8\u00f0\u0003<\u001e\u0000\u00e9\u00f0\u00034\u001a\u0000"+ + "\u00ea\u00f0\u0003\u0014\n\u0000\u00eb\u00ec\u0005+\u0000\u0000\u00ec"+ + "\u00ed\u0003\n\u0005\u0000\u00ed\u00ee\u00055\u0000\u0000\u00ee\u00f0"+ + "\u0001\u0000\u0000\u0000\u00ef\u00e7\u0001\u0000\u0000\u0000\u00ef\u00e9"+ + "\u0001\u0000\u0000\u0000\u00ef\u00ea\u0001\u0000\u0000\u0000\u00ef\u00eb"+ + "\u0001\u0000\u0000\u0000\u00f0\u00f6\u0001\u0000\u0000\u0000\u00f1\u00f2"+ + "\n\u0001\u0000\u0000\u00f2\u00f3\u0005$\u0000\u0000\u00f3\u00f5\u0003"+ + "\u0016\u000b\u0000\u00f4\u00f1\u0001\u0000\u0000\u0000\u00f5\u00f8\u0001"+ + "\u0000\u0000\u0000\u00f6\u00f4\u0001\u0000\u0000\u0000\u00f6\u00f7\u0001"+ + "\u0000\u0000\u0000\u00f7\u0013\u0001\u0000\u0000\u0000\u00f8\u00f6\u0001"+ + "\u0000\u0000\u0000\u00f9\u00fa\u00038\u001c\u0000\u00fa\u0104\u0005+\u0000"+ + "\u0000\u00fb\u0105\u0005@\u0000\u0000\u00fc\u0101\u0003\n\u0005\u0000"+ + "\u00fd\u00fe\u0005%\u0000\u0000\u00fe\u0100\u0003\n\u0005\u0000\u00ff"+ + 
"\u00fd\u0001\u0000\u0000\u0000\u0100\u0103\u0001\u0000\u0000\u0000\u0101"+ + "\u00ff\u0001\u0000\u0000\u0000\u0101\u0102\u0001\u0000\u0000\u0000\u0102"+ + "\u0105\u0001\u0000\u0000\u0000\u0103\u0101\u0001\u0000\u0000\u0000\u0104"+ + "\u00fb\u0001\u0000\u0000\u0000\u0104\u00fc\u0001\u0000\u0000\u0000\u0104"+ + "\u0105\u0001\u0000\u0000\u0000\u0105\u0106\u0001\u0000\u0000\u0000\u0106"+ + "\u0107\u00055\u0000\u0000\u0107\u0015\u0001\u0000\u0000\u0000\u0108\u0109"+ + "\u00038\u001c\u0000\u0109\u0017\u0001\u0000\u0000\u0000\u010a\u010b\u0005"+ + "\u000f\u0000\u0000\u010b\u010c\u0003\u001a\r\u0000\u010c\u0019\u0001\u0000"+ + "\u0000\u0000\u010d\u0112\u0003\u001c\u000e\u0000\u010e\u010f\u0005%\u0000"+ + "\u0000\u010f\u0111\u0003\u001c\u000e\u0000\u0110\u010e\u0001\u0000\u0000"+ + "\u0000\u0111\u0114\u0001\u0000\u0000\u0000\u0112\u0110\u0001\u0000\u0000"+ + "\u0000\u0112\u0113\u0001\u0000\u0000\u0000\u0113\u001b\u0001\u0000\u0000"+ + "\u0000\u0114\u0112\u0001\u0000\u0000\u0000\u0115\u011b\u0003\n\u0005\u0000"+ + "\u0116\u0117\u00034\u001a\u0000\u0117\u0118\u0005#\u0000\u0000\u0118\u0119"+ + "\u0003\n\u0005\u0000\u0119\u011b\u0001\u0000\u0000\u0000\u011a\u0115\u0001"+ + "\u0000\u0000\u0000\u011a\u0116\u0001\u0000\u0000\u0000\u011b\u001d\u0001"+ + "\u0000\u0000\u0000\u011c\u011d\u0005\u0006\u0000\u0000\u011d\u0122\u0003"+ + " \u0010\u0000\u011e\u011f\u0005%\u0000\u0000\u011f\u0121\u0003 \u0010"+ + "\u0000\u0120\u011e\u0001\u0000\u0000\u0000\u0121\u0124\u0001\u0000\u0000"+ + "\u0000\u0122\u0120\u0001\u0000\u0000\u0000\u0122\u0123\u0001\u0000\u0000"+ + "\u0000\u0123\u0126\u0001\u0000\u0000\u0000\u0124\u0122\u0001\u0000\u0000"+ + "\u0000\u0125\u0127\u0003&\u0013\u0000\u0126\u0125\u0001\u0000\u0000\u0000"+ + "\u0126\u0127\u0001\u0000\u0000\u0000\u0127\u0129\u0001\u0000\u0000\u0000"+ + "\u0128\u012a\u0003\"\u0011\u0000\u0129\u0128\u0001\u0000\u0000\u0000\u0129"+ + "\u012a\u0001\u0000\u0000\u0000\u012a\u001f\u0001\u0000\u0000\u0000\u012b"+ + 
"\u012c\u0005\u0018\u0000\u0000\u012c!\u0001\u0000\u0000\u0000\u012d\u012e"+ + "\u0005J\u0000\u0000\u012e\u0133\u0003$\u0012\u0000\u012f\u0130\u0005%"+ + "\u0000\u0000\u0130\u0132\u0003$\u0012\u0000\u0131\u012f\u0001\u0000\u0000"+ + "\u0000\u0132\u0135\u0001\u0000\u0000\u0000\u0133\u0131\u0001\u0000\u0000"+ + "\u0000\u0133\u0134\u0001\u0000\u0000\u0000\u0134#\u0001\u0000\u0000\u0000"+ + "\u0135\u0133\u0001\u0000\u0000\u0000\u0136\u0137\u0003^/\u0000\u0137\u0138"+ + "\u0005#\u0000\u0000\u0138\u0139\u0003^/\u0000\u0139%\u0001\u0000\u0000"+ + "\u0000\u013a\u013d\u0003(\u0014\u0000\u013b\u013d\u0003*\u0015\u0000\u013c"+ + "\u013a\u0001\u0000\u0000\u0000\u013c\u013b\u0001\u0000\u0000\u0000\u013d"+ + "\'\u0001\u0000\u0000\u0000\u013e\u013f\u0005K\u0000\u0000\u013f\u0144"+ + "\u0003 \u0010\u0000\u0140\u0141\u0005%\u0000\u0000\u0141\u0143\u0003 "+ + "\u0010\u0000\u0142\u0140\u0001\u0000\u0000\u0000\u0143\u0146\u0001\u0000"+ + "\u0000\u0000\u0144\u0142\u0001\u0000\u0000\u0000\u0144\u0145\u0001\u0000"+ + "\u0000\u0000\u0145)\u0001\u0000\u0000\u0000\u0146\u0144\u0001\u0000\u0000"+ + "\u0000\u0147\u0148\u0005C\u0000\u0000\u0148\u0149\u0003(\u0014\u0000\u0149"+ + "\u014a\u0005D\u0000\u0000\u014a+\u0001\u0000\u0000\u0000\u014b\u014c\u0005"+ + "\f\u0000\u0000\u014c\u0151\u0003 \u0010\u0000\u014d\u014e\u0005%\u0000"+ + "\u0000\u014e\u0150\u0003 \u0010\u0000\u014f\u014d\u0001\u0000\u0000\u0000"+ + "\u0150\u0153\u0001\u0000\u0000\u0000\u0151\u014f\u0001\u0000\u0000\u0000"+ + "\u0151\u0152\u0001\u0000\u0000\u0000\u0152\u0155\u0001\u0000\u0000\u0000"+ + "\u0153\u0151\u0001\u0000\u0000\u0000\u0154\u0156\u0003\u001a\r\u0000\u0155"+ + "\u0154\u0001\u0000\u0000\u0000\u0155\u0156\u0001\u0000\u0000\u0000\u0156"+ + "\u0159\u0001\u0000\u0000\u0000\u0157\u0158\u0005 \u0000\u0000\u0158\u015a"+ + "\u0003\u001a\r\u0000\u0159\u0157\u0001\u0000\u0000\u0000\u0159\u015a\u0001"+ + "\u0000\u0000\u0000\u015a-\u0001\u0000\u0000\u0000\u015b\u015c\u0005\u0004"+ + 
"\u0000\u0000\u015c\u015d\u0003\u001a\r\u0000\u015d/\u0001\u0000\u0000"+ + "\u0000\u015e\u0160\u0005\u0012\u0000\u0000\u015f\u0161\u0003\u001a\r\u0000"+ + "\u0160\u015f\u0001\u0000\u0000\u0000\u0160\u0161\u0001\u0000\u0000\u0000"+ + "\u0161\u0164\u0001\u0000\u0000\u0000\u0162\u0163\u0005 \u0000\u0000\u0163"+ + "\u0165\u0003\u001a\r\u0000\u0164\u0162\u0001\u0000\u0000\u0000\u0164\u0165"+ + "\u0001\u0000\u0000\u0000\u01651\u0001\u0000\u0000\u0000\u0166\u0167\u0005"+ + "\b\u0000\u0000\u0167\u016a\u0003\u001a\r\u0000\u0168\u0169\u0005 \u0000"+ + "\u0000\u0169\u016b\u0003\u001a\r\u0000\u016a\u0168\u0001\u0000\u0000\u0000"+ + "\u016a\u016b\u0001\u0000\u0000\u0000\u016b3\u0001\u0000\u0000\u0000\u016c"+ + "\u0171\u00038\u001c\u0000\u016d\u016e\u0005\'\u0000\u0000\u016e\u0170"+ + "\u00038\u001c\u0000\u016f\u016d\u0001\u0000\u0000\u0000\u0170\u0173\u0001"+ + "\u0000\u0000\u0000\u0171\u016f\u0001\u0000\u0000\u0000\u0171\u0172\u0001"+ + "\u0000\u0000\u0000\u01725\u0001\u0000\u0000\u0000\u0173\u0171\u0001\u0000"+ + "\u0000\u0000\u0174\u0179\u0003:\u001d\u0000\u0175\u0176\u0005\'\u0000"+ + "\u0000\u0176\u0178\u0003:\u001d\u0000\u0177\u0175\u0001\u0000\u0000\u0000"+ + "\u0178\u017b\u0001\u0000\u0000\u0000\u0179\u0177\u0001\u0000\u0000\u0000"+ + "\u0179\u017a\u0001\u0000\u0000\u0000\u017a7\u0001\u0000\u0000\u0000\u017b"+ + "\u0179\u0001\u0000\u0000\u0000\u017c\u017d\u0007\u0002\u0000\u0000\u017d"+ + "9\u0001\u0000\u0000\u0000\u017e\u017f\u0005O\u0000\u0000\u017f;\u0001"+ + "\u0000\u0000\u0000\u0180\u01ab\u00050\u0000\u0000\u0181\u0182\u0003\\"+ + ".\u0000\u0182\u0183\u0005E\u0000\u0000\u0183\u01ab\u0001\u0000\u0000\u0000"+ + "\u0184\u01ab\u0003Z-\u0000\u0185\u01ab\u0003\\.\u0000\u0186\u01ab\u0003"+ + "V+\u0000\u0187\u01ab\u00053\u0000\u0000\u0188\u01ab\u0003^/\u0000\u0189"+ + "\u018a\u0005C\u0000\u0000\u018a\u018f\u0003X,\u0000\u018b\u018c\u0005"+ + "%\u0000\u0000\u018c\u018e\u0003X,\u0000\u018d\u018b\u0001\u0000\u0000"+ + 
"\u0000\u018e\u0191\u0001\u0000\u0000\u0000\u018f\u018d\u0001\u0000\u0000"+ + "\u0000\u018f\u0190\u0001\u0000\u0000\u0000\u0190\u0192\u0001\u0000\u0000"+ + "\u0000\u0191\u018f\u0001\u0000\u0000\u0000\u0192\u0193\u0005D\u0000\u0000"+ + "\u0193\u01ab\u0001\u0000\u0000\u0000\u0194\u0195\u0005C\u0000\u0000\u0195"+ + "\u019a\u0003V+\u0000\u0196\u0197\u0005%\u0000\u0000\u0197\u0199\u0003"+ + "V+\u0000\u0198\u0196\u0001\u0000\u0000\u0000\u0199\u019c\u0001\u0000\u0000"+ + "\u0000\u019a\u0198\u0001\u0000\u0000\u0000\u019a\u019b\u0001\u0000\u0000"+ + "\u0000\u019b\u019d\u0001\u0000\u0000\u0000\u019c\u019a\u0001\u0000\u0000"+ + "\u0000\u019d\u019e\u0005D\u0000\u0000\u019e\u01ab\u0001\u0000\u0000\u0000"+ + "\u019f\u01a0\u0005C\u0000\u0000\u01a0\u01a5\u0003^/\u0000\u01a1\u01a2"+ + "\u0005%\u0000\u0000\u01a2\u01a4\u0003^/\u0000\u01a3\u01a1\u0001\u0000"+ + "\u0000\u0000\u01a4\u01a7\u0001\u0000\u0000\u0000\u01a5\u01a3\u0001\u0000"+ + "\u0000\u0000\u01a5\u01a6\u0001\u0000\u0000\u0000\u01a6\u01a8\u0001\u0000"+ + "\u0000\u0000\u01a7\u01a5\u0001\u0000\u0000\u0000\u01a8\u01a9\u0005D\u0000"+ + "\u0000\u01a9\u01ab\u0001\u0000\u0000\u0000\u01aa\u0180\u0001\u0000\u0000"+ + "\u0000\u01aa\u0181\u0001\u0000\u0000\u0000\u01aa\u0184\u0001\u0000\u0000"+ + "\u0000\u01aa\u0185\u0001\u0000\u0000\u0000\u01aa\u0186\u0001\u0000\u0000"+ + "\u0000\u01aa\u0187\u0001\u0000\u0000\u0000\u01aa\u0188\u0001\u0000\u0000"+ + "\u0000\u01aa\u0189\u0001\u0000\u0000\u0000\u01aa\u0194\u0001\u0000\u0000"+ + "\u0000\u01aa\u019f\u0001\u0000\u0000\u0000\u01ab=\u0001\u0000\u0000\u0000"+ + "\u01ac\u01ad\u0005\n\u0000\u0000\u01ad\u01ae\u0005\u001e\u0000\u0000\u01ae"+ + "?\u0001\u0000\u0000\u0000\u01af\u01b0\u0005\u0011\u0000\u0000\u01b0\u01b5"+ + "\u0003B!\u0000\u01b1\u01b2\u0005%\u0000\u0000\u01b2\u01b4\u0003B!\u0000"+ + "\u01b3\u01b1\u0001\u0000\u0000\u0000\u01b4\u01b7\u0001\u0000\u0000\u0000"+ + "\u01b5\u01b3\u0001\u0000\u0000\u0000\u01b5\u01b6\u0001\u0000\u0000\u0000"+ + 
"\u01b6A\u0001\u0000\u0000\u0000\u01b7\u01b5\u0001\u0000\u0000\u0000\u01b8"+ + "\u01ba\u0003\n\u0005\u0000\u01b9\u01bb\u0007\u0003\u0000\u0000\u01ba\u01b9"+ + "\u0001\u0000\u0000\u0000\u01ba\u01bb\u0001\u0000\u0000\u0000\u01bb\u01be"+ + "\u0001\u0000\u0000\u0000\u01bc\u01bd\u00051\u0000\u0000\u01bd\u01bf\u0007"+ + "\u0004\u0000\u0000\u01be\u01bc\u0001\u0000\u0000\u0000\u01be\u01bf\u0001"+ + "\u0000\u0000\u0000\u01bfC\u0001\u0000\u0000\u0000\u01c0\u01c1\u0005\t"+ + "\u0000\u0000\u01c1\u01c6\u00036\u001b\u0000\u01c2\u01c3\u0005%\u0000\u0000"+ + "\u01c3\u01c5\u00036\u001b\u0000\u01c4\u01c2\u0001\u0000\u0000\u0000\u01c5"+ + "\u01c8\u0001\u0000\u0000\u0000\u01c6\u01c4\u0001\u0000\u0000\u0000\u01c6"+ + "\u01c7\u0001\u0000\u0000\u0000\u01c7E\u0001\u0000\u0000\u0000\u01c8\u01c6"+ + "\u0001\u0000\u0000\u0000\u01c9\u01ca\u0005\u0002\u0000\u0000\u01ca\u01cf"+ + "\u00036\u001b\u0000\u01cb\u01cc\u0005%\u0000\u0000\u01cc\u01ce\u00036"+ + "\u001b\u0000\u01cd\u01cb\u0001\u0000\u0000\u0000\u01ce\u01d1\u0001\u0000"+ + "\u0000\u0000\u01cf\u01cd\u0001\u0000\u0000\u0000\u01cf\u01d0\u0001\u0000"+ + "\u0000\u0000\u01d0G\u0001\u0000\u0000\u0000\u01d1\u01cf\u0001\u0000\u0000"+ + "\u0000\u01d2\u01d3\u0005\u000e\u0000\u0000\u01d3\u01d8\u0003J%\u0000\u01d4"+ + "\u01d5\u0005%\u0000\u0000\u01d5\u01d7\u0003J%\u0000\u01d6\u01d4\u0001"+ + "\u0000\u0000\u0000\u01d7\u01da\u0001\u0000\u0000\u0000\u01d8\u01d6\u0001"+ + "\u0000\u0000\u0000\u01d8\u01d9\u0001\u0000\u0000\u0000\u01d9I\u0001\u0000"+ + "\u0000\u0000\u01da\u01d8\u0001\u0000\u0000\u0000\u01db\u01dc\u00036\u001b"+ + "\u0000\u01dc\u01dd\u0005S\u0000\u0000\u01dd\u01de\u00036\u001b\u0000\u01de"+ + "K\u0001\u0000\u0000\u0000\u01df\u01e0\u0005\u0001\u0000\u0000\u01e0\u01e1"+ + "\u0003\u0012\t\u0000\u01e1\u01e3\u0003^/\u0000\u01e2\u01e4\u0003R)\u0000"+ + "\u01e3\u01e2\u0001\u0000\u0000\u0000\u01e3\u01e4\u0001\u0000\u0000\u0000"+ + "\u01e4M\u0001\u0000\u0000\u0000\u01e5\u01e6\u0005\u0007\u0000\u0000\u01e6"+ + 
"\u01e7\u0003\u0012\t\u0000\u01e7\u01e8\u0003^/\u0000\u01e8O\u0001\u0000"+ + "\u0000\u0000\u01e9\u01ea\u0005\r\u0000\u0000\u01ea\u01eb\u00034\u001a"+ + "\u0000\u01ebQ\u0001\u0000\u0000\u0000\u01ec\u01f1\u0003T*\u0000\u01ed"+ + "\u01ee\u0005%\u0000\u0000\u01ee\u01f0\u0003T*\u0000\u01ef\u01ed\u0001"+ + "\u0000\u0000\u0000\u01f0\u01f3\u0001\u0000\u0000\u0000\u01f1\u01ef\u0001"+ + "\u0000\u0000\u0000\u01f1\u01f2\u0001\u0000\u0000\u0000\u01f2S\u0001\u0000"+ + "\u0000\u0000\u01f3\u01f1\u0001\u0000\u0000\u0000\u01f4\u01f5\u00038\u001c"+ + "\u0000\u01f5\u01f6\u0005#\u0000\u0000\u01f6\u01f7\u0003<\u001e\u0000\u01f7"+ + "U\u0001\u0000\u0000\u0000\u01f8\u01f9\u0007\u0005\u0000\u0000\u01f9W\u0001"+ + "\u0000\u0000\u0000\u01fa\u01fd\u0003Z-\u0000\u01fb\u01fd\u0003\\.\u0000"+ + "\u01fc\u01fa\u0001\u0000\u0000\u0000\u01fc\u01fb\u0001\u0000\u0000\u0000"+ + "\u01fdY\u0001\u0000\u0000\u0000\u01fe\u0200\u0007\u0000\u0000\u0000\u01ff"+ + "\u01fe\u0001\u0000\u0000\u0000\u01ff\u0200\u0001\u0000\u0000\u0000\u0200"+ + "\u0201\u0001\u0000\u0000\u0000\u0201\u0202\u0005\u001f\u0000\u0000\u0202"+ + "[\u0001\u0000\u0000\u0000\u0203\u0205\u0007\u0000\u0000\u0000\u0204\u0203"+ + "\u0001\u0000\u0000\u0000\u0204\u0205\u0001\u0000\u0000\u0000\u0205\u0206"+ + "\u0001\u0000\u0000\u0000\u0206\u0207\u0005\u001e\u0000\u0000\u0207]\u0001"+ + "\u0000\u0000\u0000\u0208\u0209\u0005\u001d\u0000\u0000\u0209_\u0001\u0000"+ + "\u0000\u0000\u020a\u020b\u0007\u0006\u0000\u0000\u020ba\u0001\u0000\u0000"+ + "\u0000\u020c\u020d\u0005\u0005\u0000\u0000\u020d\u020e\u0003d2\u0000\u020e"+ + "c\u0001\u0000\u0000\u0000\u020f\u0210\u0005C\u0000\u0000\u0210\u0211\u0003"+ + "\u0002\u0001\u0000\u0211\u0212\u0005D\u0000\u0000\u0212e\u0001\u0000\u0000"+ + "\u0000\u0213\u0214\u0005\u0010\u0000\u0000\u0214\u0215\u0005c\u0000\u0000"+ + "\u0215g\u0001\u0000\u0000\u0000\u0216\u0217\u0005\u000b\u0000\u0000\u0217"+ + "\u0218\u0005g\u0000\u0000\u0218i\u0001\u0000\u0000\u0000\u0219\u021a\u0005"+ + 
"\u0003\u0000\u0000\u021a\u021d\u0005Y\u0000\u0000\u021b\u021c\u0005W\u0000"+ + "\u0000\u021c\u021e\u00036\u001b\u0000\u021d\u021b\u0001\u0000\u0000\u0000"+ + "\u021d\u021e\u0001\u0000\u0000\u0000\u021e\u0228\u0001\u0000\u0000\u0000"+ + "\u021f\u0220\u0005X\u0000\u0000\u0220\u0225\u0003l6\u0000\u0221\u0222"+ + "\u0005%\u0000\u0000\u0222\u0224\u0003l6\u0000\u0223\u0221\u0001\u0000"+ + "\u0000\u0000\u0224\u0227\u0001\u0000\u0000\u0000\u0225\u0223\u0001\u0000"+ + "\u0000\u0000\u0225\u0226\u0001\u0000\u0000\u0000\u0226\u0229\u0001\u0000"+ + "\u0000\u0000\u0227\u0225\u0001\u0000\u0000\u0000\u0228\u021f\u0001\u0000"+ + "\u0000\u0000\u0228\u0229\u0001\u0000\u0000\u0000\u0229k\u0001\u0000\u0000"+ + "\u0000\u022a\u022b\u00036\u001b\u0000\u022b\u022c\u0005#\u0000\u0000\u022c"+ + "\u022e\u0001\u0000\u0000\u0000\u022d\u022a\u0001\u0000\u0000\u0000\u022d"+ + "\u022e\u0001\u0000\u0000\u0000\u022e\u022f\u0001\u0000\u0000\u0000\u022f"+ + "\u0230\u00036\u001b\u0000\u0230m\u0001\u0000\u0000\u00007y\u0082\u0091"+ + "\u009d\u00a6\u00ae\u00b2\u00ba\u00bc\u00c1\u00c8\u00cd\u00d4\u00da\u00e2"+ + "\u00e4\u00ef\u00f6\u0101\u0104\u0112\u011a\u0122\u0126\u0129\u0133\u013c"+ + "\u0144\u0151\u0155\u0159\u0160\u0164\u016a\u0171\u0179\u018f\u019a\u01a5"+ + "\u01aa\u01b5\u01ba\u01be\u01c6\u01cf\u01d8\u01e3\u01f1\u01fc\u01ff\u0204"+ + "\u021d\u0225\u0228\u022d"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java index 5122eb07371b1..92c9793fd8d9a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java @@ -365,13 +365,13 @@ public class EsqlBaseParserBaseListener 
implements EsqlBaseParserListener { * *

The default implementation does nothing.

*/ - @Override public void enterFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx) { } + @Override public void enterIndexIdentifier(EsqlBaseParser.IndexIdentifierContext ctx) { } /** * {@inheritDoc} * *

The default implementation does nothing.

*/ - @Override public void exitFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx) { } + @Override public void exitIndexIdentifier(EsqlBaseParser.IndexIdentifierContext ctx) { } /** * {@inheritDoc} * @@ -432,6 +432,18 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener { *

The default implementation does nothing.

*/ @Override public void exitDeprecated_metadata(EsqlBaseParser.Deprecated_metadataContext ctx) { } + /** + * {@inheritDoc} + * + *

The default implementation does nothing.

+ */ + @Override public void enterMetricsCommand(EsqlBaseParser.MetricsCommandContext ctx) { } + /** + * {@inheritDoc} + * + *

The default implementation does nothing.

+ */ + @Override public void exitMetricsCommand(EsqlBaseParser.MetricsCommandContext ctx) { } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java index a32ac9bd9100c..25eb59648fe6f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java @@ -221,7 +221,7 @@ public class EsqlBaseParserBaseVisitor extends AbstractParseTreeVisitor im *

The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.

*/ - @Override public T visitFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx) { return visitChildren(ctx); } + @Override public T visitIndexIdentifier(EsqlBaseParser.IndexIdentifierContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * @@ -257,6 +257,13 @@ public class EsqlBaseParserBaseVisitor extends AbstractParseTreeVisitor im * {@link #visitChildren} on {@code ctx}.

*/ @Override public T visitDeprecated_metadata(EsqlBaseParser.Deprecated_metadataContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitMetricsCommand(EsqlBaseParser.MetricsCommandContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java index 6e8000f7fcf8e..ac4047ffbd22f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java @@ -336,15 +336,15 @@ public interface EsqlBaseParserListener extends ParseTreeListener { */ void exitFromCommand(EsqlBaseParser.FromCommandContext ctx); /** - * Enter a parse tree produced by {@link EsqlBaseParser#fromIdentifier}. + * Enter a parse tree produced by {@link EsqlBaseParser#indexIdentifier}. * @param ctx the parse tree */ - void enterFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx); + void enterIndexIdentifier(EsqlBaseParser.IndexIdentifierContext ctx); /** - * Exit a parse tree produced by {@link EsqlBaseParser#fromIdentifier}. + * Exit a parse tree produced by {@link EsqlBaseParser#indexIdentifier}. * @param ctx the parse tree */ - void exitFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx); + void exitIndexIdentifier(EsqlBaseParser.IndexIdentifierContext ctx); /** * Enter a parse tree produced by {@link EsqlBaseParser#fromOptions}. * @param ctx the parse tree @@ -395,6 +395,16 @@ public interface EsqlBaseParserListener extends ParseTreeListener { * @param ctx the parse tree */ void exitDeprecated_metadata(EsqlBaseParser.Deprecated_metadataContext ctx); + /** + * Enter a parse tree produced by {@link EsqlBaseParser#metricsCommand}. + * @param ctx the parse tree + */ + void enterMetricsCommand(EsqlBaseParser.MetricsCommandContext ctx); + /** + * Exit a parse tree produced by {@link EsqlBaseParser#metricsCommand}. 
+ * @param ctx the parse tree + */ + void exitMetricsCommand(EsqlBaseParser.MetricsCommandContext ctx); /** * Enter a parse tree produced by {@link EsqlBaseParser#evalCommand}. * @param ctx the parse tree diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java index d6e83b37a0f39..37b94cd585c11 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java @@ -204,11 +204,11 @@ public interface EsqlBaseParserVisitor extends ParseTreeVisitor { */ T visitFromCommand(EsqlBaseParser.FromCommandContext ctx); /** - * Visit a parse tree produced by {@link EsqlBaseParser#fromIdentifier}. + * Visit a parse tree produced by {@link EsqlBaseParser#indexIdentifier}. * @param ctx the parse tree * @return the visitor result */ - T visitFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx); + T visitIndexIdentifier(EsqlBaseParser.IndexIdentifierContext ctx); /** * Visit a parse tree produced by {@link EsqlBaseParser#fromOptions}. * @param ctx the parse tree @@ -239,6 +239,12 @@ public interface EsqlBaseParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitDeprecated_metadata(EsqlBaseParser.Deprecated_metadataContext ctx); + /** + * Visit a parse tree produced by {@link EsqlBaseParser#metricsCommand}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitMetricsCommand(EsqlBaseParser.MetricsCommandContext ctx); /** * Visit a parse tree produced by {@link EsqlBaseParser#evalCommand}. 
* @param ctx the parse tree diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java index 67f8eb407ee11..b5e348589fa7b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java @@ -9,8 +9,8 @@ import org.antlr.v4.runtime.tree.TerminalNode; import org.elasticsearch.common.Strings; -import org.elasticsearch.xpack.esql.parser.EsqlBaseParser.FromIdentifierContext; import org.elasticsearch.xpack.esql.parser.EsqlBaseParser.IdentifierContext; +import org.elasticsearch.xpack.esql.parser.EsqlBaseParser.IndexIdentifierContext; import java.util.List; @@ -24,8 +24,8 @@ public String visitIdentifier(IdentifierContext ctx) { } @Override - public String visitFromIdentifier(FromIdentifierContext ctx) { - return ctx == null ? null : unquoteIdentifier(ctx.QUOTED_IDENTIFIER(), ctx.FROM_UNQUOTED_IDENTIFIER()); + public String visitIndexIdentifier(IndexIdentifierContext ctx) { + return ctx == null ? 
null : unquoteIdentifier(null, ctx.INDEX_UNQUOTED_IDENTIFIER()); } protected static String unquoteIdentifier(TerminalNode quotedNode, TerminalNode unquotedNode) { @@ -42,7 +42,7 @@ protected static String unquoteIdString(String quotedString) { return quotedString.substring(1, quotedString.length() - 1).replace("``", "`"); } - public String visitFromIdentifiers(List ctx) { + public String visitIndexIdentifiers(List ctx) { return Strings.collectionToDelimitedString(visitList(this, ctx, String.class), ","); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index aea835c11ad3d..b8fc29e4ef64d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -10,6 +10,7 @@ import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.Token; import org.antlr.v4.runtime.tree.ParseTree; +import org.elasticsearch.Build; import org.elasticsearch.core.Tuple; import org.elasticsearch.dissect.DissectException; import org.elasticsearch.dissect.DissectParser; @@ -205,7 +206,7 @@ public LogicalPlan visitRowCommand(EsqlBaseParser.RowCommandContext ctx) { @Override public LogicalPlan visitFromCommand(EsqlBaseParser.FromCommandContext ctx) { Source source = source(ctx); - TableIdentifier table = new TableIdentifier(source, null, visitFromIdentifiers(ctx.fromIdentifier())); + TableIdentifier table = new TableIdentifier(source, null, visitIndexIdentifiers(ctx.indexIdentifier())); Map metadataMap = new LinkedHashMap<>(); if (ctx.metadata() != null) { var deprecatedContext = ctx.metadata().deprecated_metadata(); @@ -222,8 +223,8 @@ public LogicalPlan visitFromCommand(EsqlBaseParser.FromCommandContext ctx) { metadataOptionContext = ctx.metadata().metadataOption(); } - for (var c : 
metadataOptionContext.fromIdentifier()) { - String id = visitFromIdentifier(c); + for (var c : metadataOptionContext.indexIdentifier()) { + String id = visitIndexIdentifier(c); Source src = source(c); if (MetadataAttribute.isSupported(id) == false) { throw new ParsingException(src, "unsupported metadata field [" + id + "]"); @@ -253,10 +254,19 @@ public LogicalPlan visitFromCommand(EsqlBaseParser.FromCommandContext ctx) { @Override public PlanFactory visitStatsCommand(EsqlBaseParser.StatsCommandContext ctx) { - List aggregates = new ArrayList<>(visitFields(ctx.stats)); - List groupings = visitGrouping(ctx.grouping); + final Stats stats = stats(source(ctx), ctx.grouping, ctx.stats); + return input -> new EsqlAggregate(source(ctx), input, stats.groupings, stats.aggregates); + } + + private record Stats(List groupings, List aggregates) { + + } + + private Stats stats(Source source, EsqlBaseParser.FieldsContext groupingsCtx, EsqlBaseParser.FieldsContext aggregatesCtx) { + List groupings = visitGrouping(groupingsCtx); + List aggregates = new ArrayList<>(visitFields(aggregatesCtx)); if (aggregates.isEmpty() && groupings.isEmpty()) { - throw new ParsingException(source(ctx), "At least one aggregation or grouping expression required in [{}]", ctx.getText()); + throw new ParsingException(source, "At least one aggregation or grouping expression required in [{}]", source.text()); } // grouping keys are automatically added as aggregations however the user is not allowed to specify them if (groupings.isEmpty() == false && aggregates.isEmpty() == false) { @@ -279,8 +289,7 @@ public PlanFactory visitStatsCommand(EsqlBaseParser.StatsCommandContext ctx) { for (Expression group : groupings) { aggregates.add(Expressions.attribute(group)); } - - return input -> new EsqlAggregate(source(ctx), input, new ArrayList<>(groupings), aggregates); + return new Stats(new ArrayList<>(groupings), aggregates); } private void fail(Expression exp, String message, Object... 
args) { @@ -427,5 +436,20 @@ private static Tuple parsePolicyName(Token policyToken) { return new Tuple<>(mode, policyName); } + @Override + public LogicalPlan visitMetricsCommand(EsqlBaseParser.MetricsCommandContext ctx) { + if (Build.current().isSnapshot() == false) { + throw new IllegalArgumentException("METRICS command currently requires a snapshot build"); + } + Source source = source(ctx); + TableIdentifier table = new TableIdentifier(source, null, visitIndexIdentifiers(ctx.indexIdentifier())); + var unresolvedRelation = new EsqlUnresolvedRelation(source, table, List.of()); + if (ctx.aggregates == null && ctx.grouping == null) { + return unresolvedRelation; + } + final Stats stats = stats(source, ctx.grouping, ctx.aggregates); + return new EsqlAggregate(source, unresolvedRelation, stats.groupings, stats.aggregates); + } + interface PlanFactory extends Function {} } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/ImmediateLocalSupplier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/ImmediateLocalSupplier.java new file mode 100644 index 0000000000000..8bcf5c472b2d0 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/ImmediateLocalSupplier.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.plan.logical.local; + +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; + +import java.io.IOException; +import java.util.Arrays; + +/** + * A {@link LocalSupplier} that contains already filled {@link Block}s. 
+ */ +class ImmediateLocalSupplier implements LocalSupplier { + private final Block[] blocks; + + ImmediateLocalSupplier(Block[] blocks) { + this.blocks = blocks; + } + + @Override + public Block[] get() { + return blocks; + } + + @Override + public String toString() { + return Arrays.toString(blocks); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeArray((o, v) -> ((PlanStreamOutput) o).writeCachedBlock(v), blocks); + } + + @Override + public boolean equals(Object obj) { + if (obj == null || obj.getClass() != getClass()) { + return false; + } + ImmediateLocalSupplier other = (ImmediateLocalSupplier) obj; + return Arrays.equals(blocks, other.blocks); + } + + @Override + public int hashCode() { + return Arrays.hashCode(blocks); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplier.java index 7fa82359ffc45..3b81da06d7077 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplier.java @@ -7,13 +7,25 @@ package org.elasticsearch.xpack.esql.plan.logical.local; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockUtils; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import java.util.Arrays; +import java.io.IOException; import java.util.function.Supplier; -public interface LocalSupplier extends Supplier { +/** + * Supplies fixed {@link Block}s for things calculated at plan time. + *

+ * This is {@link Writeable} so we can model {@code LOOKUP} and + * hash joins which have to go over the wire. But many implementers + * don't have to go over the wire and they should feel free to throw + * {@link UnsupportedOperationException}. + *

+ */ +public interface LocalSupplier extends Supplier, Writeable { LocalSupplier EMPTY = new LocalSupplier() { @Override @@ -25,19 +37,29 @@ public Block[] get() { public String toString() { return "EMPTY"; } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(0); + } + + @Override + public boolean equals(Object obj) { + return obj == EMPTY; + } + + @Override + public int hashCode() { + return 0; + } }; static LocalSupplier of(Block[] blocks) { - return new LocalSupplier() { - @Override - public Block[] get() { - return blocks; - } - - @Override - public String toString() { - return Arrays.toString(blocks); - } - }; + return new ImmediateLocalSupplier(blocks); + } + + static LocalSupplier readFrom(PlanStreamInput in) throws IOException { + Block[] blocks = in.readCachedBlockArray(); + return blocks.length == 0 ? EMPTY : of(blocks); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/AggregateExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/AggregateExec.java index 9feb5e9b009d1..490ec174eea5a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/AggregateExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/AggregateExec.java @@ -68,6 +68,10 @@ public List aggregates() { return aggregates; } + public AggregateExec withMode(Mode newMode) { + return new AggregateExec(source(), child(), groupings, aggregates, newMode, estimatedRowSize); + } + /** * Estimate of the number of bytes that'll be loaded per position before * the stream of pages is consumed. 
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java index 8c5392ccc1781..f5e4dead67347 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java @@ -20,6 +20,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; +import org.elasticsearch.xpack.esql.plan.physical.ExchangeSourceExec; import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner.LocalExecutionPlannerContext; import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner.PhysicalOperation; import org.elasticsearch.xpack.ql.InvalidArgumentException; @@ -54,6 +55,20 @@ public final PhysicalOperation groupingPhysicalOperation( var aggregates = aggregateExec.aggregates(); var sourceLayout = source.layout; + AggregatorMode aggregatorMode; + + if (mode == AggregateExec.Mode.FINAL) { + aggregatorMode = AggregatorMode.FINAL; + } else if (mode == AggregateExec.Mode.PARTIAL) { + if (aggregateExec.child() instanceof ExchangeSourceExec) {// the reducer step at data node (local) level + aggregatorMode = AggregatorMode.INTERMEDIATE; + } else { + aggregatorMode = AggregatorMode.INITIAL; + } + } else { + assert false : "Invalid aggregator mode [" + mode + "]"; + aggregatorMode = AggregatorMode.SINGLE; + } if (aggregateExec.groupings().isEmpty()) { // not grouping @@ -65,20 +80,18 @@ public final PhysicalOperation groupingPhysicalOperation( } else { layout.append(aggregateMapper.mapNonGrouping(aggregates)); } + // create the agg factories aggregatesToFactory( aggregates, - mode, + 
aggregatorMode, sourceLayout, false, // non-grouping s -> aggregatorFactories.add(s.supplier.aggregatorFactory(s.mode)) ); if (aggregatorFactories.isEmpty() == false) { - operatorFactory = new AggregationOperator.AggregationOperatorFactory( - aggregatorFactories, - mode == AggregateExec.Mode.FINAL ? AggregatorMode.FINAL : AggregatorMode.INITIAL - ); + operatorFactory = new AggregationOperator.AggregationOperatorFactory(aggregatorFactories, aggregatorMode); } } else { // grouping @@ -136,7 +149,7 @@ else if (mode == AggregateExec.Mode.PARTIAL) { // create the agg factories aggregatesToFactory( aggregates, - mode, + aggregatorMode, sourceLayout, true, // grouping s -> aggregatorFactories.add(s.supplier.groupingAggregatorFactory(s.mode)) @@ -219,7 +232,7 @@ private record AggFunctionSupplierContext(AggregatorFunctionSupplier supplier, A private void aggregatesToFactory( List aggregates, - AggregateExec.Mode mode, + AggregatorMode mode, Layout layout, boolean grouping, Consumer consumer @@ -228,11 +241,9 @@ private void aggregatesToFactory( if (ne instanceof Alias alias) { var child = alias.child(); if (child instanceof AggregateFunction aggregateFunction) { - AggregatorMode aggMode = null; List sourceAttr; - if (mode == AggregateExec.Mode.PARTIAL) { - aggMode = AggregatorMode.INITIAL; + if (mode == AggregatorMode.INITIAL) { // TODO: this needs to be made more reliable - use casting to blow up when dealing with expressions (e+1) Expression field = aggregateFunction.field(); // Only count can now support literals - all the other aggs should be optimized away @@ -257,9 +268,7 @@ private void aggregatesToFactory( } sourceAttr = List.of(attr); } - - } else if (mode == AggregateExec.Mode.FINAL) { - aggMode = AggregatorMode.FINAL; + } else if (mode == AggregatorMode.FINAL || mode == AggregatorMode.INTERMEDIATE) { if (grouping) { sourceAttr = aggregateMapper.mapGrouping(aggregateFunction); } else { @@ -279,7 +288,7 @@ private void aggregatesToFactory( assert 
inputChannels.size() > 0 && inputChannels.stream().allMatch(i -> i >= 0); } if (aggregateFunction instanceof ToAggregator agg) { - consumer.accept(new AggFunctionSupplierContext(agg.supplier(inputChannels), aggMode)); + consumer.accept(new AggFunctionSupplierContext(agg.supplier(inputChannels), mode)); } else { throw new EsqlIllegalArgumentException("aggregate functions must extend ToAggregator"); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java index 62b305a68bc28..1212e77557ca6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java @@ -54,7 +54,7 @@ public class Mapper { private final FunctionRegistry functionRegistry; - private final boolean localMode; + private final boolean localMode; // non-coordinator (data node) mode public Mapper(FunctionRegistry functionRegistry) { this.functionRegistry = functionRegistry; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java index 26c57f13e16c4..fbfc57261bc40 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java @@ -24,6 +24,7 @@ import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.EsRelation; import org.elasticsearch.xpack.esql.plan.logical.TopN; +import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.EsSourceExec; import org.elasticsearch.xpack.esql.plan.physical.EstimatesRowSize; @@ -87,23 +88,19 @@ public static 
PhysicalPlan dataNodeReductionPlan(LogicalPlan plan, PhysicalPlan if (pipelineBreakers.isEmpty() == false) { UnaryPlan pipelineBreaker = (UnaryPlan) pipelineBreakers.get(0); - if (pipelineBreaker instanceof TopN topN) { - return new TopNExec(topN.source(), unused, topN.order(), topN.limit(), 2000); + if (pipelineBreaker instanceof TopN) { + Mapper mapper = new Mapper(true); + var physicalPlan = EstimatesRowSize.estimateRowSize(0, mapper.map(plan)); + return physicalPlan.collectFirstChildren(TopNExec.class::isInstance).get(0); } else if (pipelineBreaker instanceof Limit limit) { return new LimitExec(limit.source(), unused, limit.limit()); } else if (pipelineBreaker instanceof OrderBy order) { return new OrderExec(order.source(), unused, order.order()); - } else if (pipelineBreaker instanceof Aggregate aggregate) { - // TODO handle this as a special PARTIAL step (intermediate) - /*return new AggregateExec( - aggregate.source(), - unused, - aggregate.groupings(), - aggregate.aggregates(), - AggregateExec.Mode.PARTIAL, - 0 - );*/ - return null; + } else if (pipelineBreaker instanceof Aggregate) { + Mapper mapper = new Mapper(true); + var physicalPlan = EstimatesRowSize.estimateRowSize(0, mapper.map(plan)); + var aggregate = (AggregateExec) physicalPlan.collectFirstChildren(AggregateExec.class::isInstance).get(0); + return aggregate.withMode(AggregateExec.Mode.PARTIAL); } else { throw new EsqlIllegalArgumentException("unsupported unary physical plan node [" + pipelineBreaker.nodeName() + "]"); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeRequest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeRequest.java index 2f5920a4e32c9..bedbd517f1184 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeRequest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeRequest.java @@ -93,7 +93,7 @@ public void 
writeTo(StreamOutput out) throws IOException { out.writeString(clusterAlias); out.writeString(sessionId); configuration.writeTo(out); - new PlanStreamOutput(out, planNameRegistry).writePhysicalPlanNode(plan); + new PlanStreamOutput(out, planNameRegistry, configuration).writePhysicalPlanNode(plan); out.writeStringArray(indices); out.writeStringArray(originalIndices); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index 7b38197dde95a..d9005d5997b34 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -205,6 +205,7 @@ public void execute( RefCountingListener refs = new RefCountingListener(listener.map(unused -> new Result(collectedPages, collectedProfiles))) ) { // run compute on the coordinator + exchangeSource.addCompletionListener(refs.acquire()); runCompute( rootTask, new ComputeContext(sessionId, RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, List.of(), configuration, exchangeSource, null), @@ -722,6 +723,7 @@ private void runComputeOnDataNode( var externalSink = exchangeService.getSinkHandler(externalId); task.addListener(() -> exchangeService.finishSinkHandler(externalId, new TaskCancelledException(task.getReasonCancelled()))); var exchangeSource = new ExchangeSourceHandler(1, esqlExecutor); + exchangeSource.addCompletionListener(refs.acquire()); exchangeSource.addRemoteSink(internalSink::fetchPageAsync, 1); ActionListener reductionListener = cancelOnFailure(task, cancelled, refs.acquire()); runCompute( @@ -854,6 +856,7 @@ void runComputeOnRemoteCluster( RefCountingListener refs = new RefCountingListener(listener.map(unused -> new ComputeResponse(collectedProfiles))) ) { exchangeSink.addCompletionListener(refs.acquire()); + 
exchangeSource.addCompletionListener(refs.acquire()); PhysicalPlan coordinatorPlan = new ExchangeSinkExec( plan.source(), plan.output(), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequest.java index 6c87b226aa590..ab2df4a2ba6a9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequest.java @@ -70,7 +70,7 @@ final class DataNodeRequest extends TransportRequest implements IndicesRequest { // TODO make EsqlConfiguration Releasable new BlockStreamInput(in, new BlockFactory(new NoopCircuitBreaker(CircuitBreaker.REQUEST), BigArrays.NON_RECYCLING_INSTANCE)) ); - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_CLUSTER_ALIAS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { this.clusterAlias = in.readString(); } else { this.clusterAlias = RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY; @@ -85,12 +85,12 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(sessionId); configuration.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_CLUSTER_ALIAS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeString(clusterAlias); } out.writeCollection(shardIds); out.writeMap(aliasFilters); - new PlanStreamOutput(out, planNameRegistry).writePhysicalPlanNode(plan); + new PlanStreamOutput(out, planNameRegistry, configuration).writePhysicalPlanNode(plan); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java index f6b534f7316df..4f852264193b4 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java @@ -136,6 +136,17 @@ public class EsqlFeatures implements FeatureSpecification { */ public static final NodeFeature METADATA_FIELDS = new NodeFeature("esql.metadata_fields"); + /** + * Support for loading values over enrich. This is supported by all versions of ESQL but not + * the unit test CsvTests. + */ + public static final NodeFeature ENRICH_LOAD = new NodeFeature("esql.enrich_load"); + + /** + * Support for timespan units abbreviations + */ + public static final NodeFeature TIMESPAN_ABBREVIATIONS = new NodeFeature("esql.timespan_abbreviations"); + @Override public Set getFeatures() { return Set.of( @@ -157,7 +168,8 @@ public Set getFeatures() { MV_ORDERING_SORTED_ASCENDING, METRICS_COUNTER_FIELDS, STRING_LITERAL_AUTO_CASTING_EXTENDED, - METADATA_FIELDS + METADATA_FIELDS, + TIMESPAN_ABBREVIATIONS ); } @@ -168,7 +180,8 @@ public Map getHistoricalFeatures() { Map.entry(MV_WARN, Version.V_8_12_0), Map.entry(SPATIAL_POINTS, Version.V_8_12_0), Map.entry(CONVERT_WARN, Version.V_8_12_0), - Map.entry(POW_DOUBLE, Version.V_8_12_0) + Map.entry(POW_DOUBLE, Version.V_8_12_0), + Map.entry(ENRICH_LOAD, Version.V_8_12_0) ); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java index e4c7983d9a83a..e1360c67976ca 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java @@ -234,18 +234,20 @@ public static DataType commonType(DataType left, DataType right) { return DataTypeConverter.commonType(left, right); } + // generally supporting abbreviations from https://en.wikipedia.org/wiki/Unit_of_time public 
static TemporalAmount parseTemporalAmout(Number value, String qualifier, Source source) throws InvalidArgumentException, ArithmeticException, ParsingException { return switch (qualifier) { - case "millisecond", "milliseconds" -> Duration.ofMillis(safeToLong(value)); - case "second", "seconds" -> Duration.ofSeconds(safeToLong(value)); - case "minute", "minutes" -> Duration.ofMinutes(safeToLong(value)); - case "hour", "hours" -> Duration.ofHours(safeToLong(value)); - - case "day", "days" -> Period.ofDays(safeToInt(safeToLong(value))); - case "week", "weeks" -> Period.ofWeeks(safeToInt(safeToLong(value))); - case "month", "months" -> Period.ofMonths(safeToInt(safeToLong(value))); - case "year", "years" -> Period.ofYears(safeToInt(safeToLong(value))); + case "millisecond", "milliseconds", "ms" -> Duration.ofMillis(safeToLong(value)); + case "second", "seconds", "sec", "s" -> Duration.ofSeconds(safeToLong(value)); + case "minute", "minutes", "min" -> Duration.ofMinutes(safeToLong(value)); + case "hour", "hours", "h" -> Duration.ofHours(safeToLong(value)); + + case "day", "days", "d" -> Period.ofDays(safeToInt(safeToLong(value))); + case "week", "weeks", "w" -> Period.ofWeeks(safeToInt(safeToLong(value))); + case "month", "months", "mo" -> Period.ofMonths(safeToInt(safeToLong(value))); + case "quarter", "quarters", "q" -> Period.ofMonths(safeToInt(Math.multiplyExact(3L, safeToLong(value)))); + case "year", "years", "yr", "y" -> Period.ofYears(safeToInt(safeToLong(value))); default -> throw new ParsingException(source, "Unexpected time interval qualifier: '{}'", qualifier); }; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index c865b21723a9e..cb8700d5d7602 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -107,9 +107,12 @@ import 
static org.elasticsearch.xpack.esql.CsvTestsDataLoader.CSV_DATASET_MAP; import static org.elasticsearch.xpack.esql.EsqlTestUtils.TEST_VERIFIER; import static org.elasticsearch.xpack.esql.EsqlTestUtils.loadMapping; +import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.cap; import static org.elasticsearch.xpack.ql.CsvSpecReader.specParser; import static org.elasticsearch.xpack.ql.TestUtils.classpathResources; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @@ -144,7 +147,6 @@ public class CsvTests extends ESTestCase { private static final Logger LOGGER = LogManager.getLogger(CsvTests.class); - private static final String IGNORED_CSV_FILE_NAMES_PATTERN = "-IT_tests_only"; private final String fileName; private final String groupName; @@ -164,10 +166,8 @@ public class CsvTests extends ESTestCase { @ParametersFactory(argumentFormatting = "%2$s.%3$s") public static List readScriptSpec() throws Exception { - List urls = classpathResources("/*.csv-spec").stream() - .filter(x -> x.toString().contains(IGNORED_CSV_FILE_NAMES_PATTERN) == false) - .toList(); - assertTrue("Not enough specs found " + urls, urls.size() > 0); + List urls = classpathResources("/*.csv-spec"); + assertThat("Not enough specs found " + urls, urls, hasSize(greaterThan(0))); return SpecReader.readScriptSpec(urls, specParser()); } @@ -223,7 +223,8 @@ public final void test() throws Throwable { * The csv tests support all but a few features. The unsupported features * are tested in integration tests. 
*/ - assumeFalse("metadata fields aren't supported", testCase.requiredFeatures.contains(EsqlFeatures.METADATA_FIELDS.id())); + assumeFalse("metadata fields aren't supported", testCase.requiredCapabilities.contains(cap(EsqlFeatures.METADATA_FIELDS))); + assumeFalse("enrich can't load fields in csv tests", testCase.requiredCapabilities.contains(cap(EsqlFeatures.ENRICH_LOAD))); doTest(); } catch (Throwable th) { throw reworkException(th); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java index 312250d2f58d0..185fb14503cab 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java @@ -79,7 +79,7 @@ public static T serializeDeserialize(T orig, Serializer serializer, Deser public static T serializeDeserialize(T orig, Serializer serializer, Deserializer deserializer, EsqlConfiguration config) { try (BytesStreamOutput out = new BytesStreamOutput()) { - PlanStreamOutput planStreamOutput = new PlanStreamOutput(out, planNameRegistry); + PlanStreamOutput planStreamOutput = new PlanStreamOutput(out, planNameRegistry, config); serializer.write(planStreamOutput, orig); StreamInput in = new NamedWriteableAwareStreamInput( ByteBufferStreamInput.wrap(BytesReference.toBytes(out.bytes())), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index c19d48f3cd50e..4867b0c62a18c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ 
-166,14 +166,17 @@ protected static Iterable parameterSuppliersFromTypedData(List values) { - return new Page(BlockUtils.fromListRow(TestBlockFactory.getNonBreakingInstance(), values)); + return new Page(1, BlockUtils.fromListRow(TestBlockFactory.getNonBreakingInstance(), values)); } /** @@ -249,18 +252,15 @@ protected final void assertResolveTypeValid(Expression expression, DataType expe } public final void testEvaluate() { - assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable()); + assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); + assumeTrue("Expected type must be representable to build an evaluator", EsqlDataTypes.isRepresentable(testCase.expectedType())); logger.info( "Test Values: " + testCase.getData().stream().map(TestCaseSupplier.TypedData::toString).collect(Collectors.joining(",")) ); boolean readFloating = randomBoolean(); Expression expression = readFloating ? buildDeepCopyOfFieldExpression(testCase) : buildFieldExpression(testCase); if (testCase.getExpectedTypeError() != null) { - assertTrue("expected unresolved", expression.typeResolved().unresolved()); - if (readFloating == false) { - // The hack that creates floating fields changes the error message so don't assert it - assertThat(expression.typeResolved().message(), equalTo(testCase.getExpectedTypeError())); - } + assertTypeResolutionFailure(expression); return; } Expression.TypeResolution resolution = expression.typeResolved(); @@ -306,7 +306,13 @@ private Object toJavaObjectUnsignedLongAware(Block block, int position) { *

*/ public final void testEvaluateBlockWithoutNulls() { - testEvaluateBlock(driverContext().blockFactory(), driverContext(), false); + assumeTrue("no warning is expected", testCase.getExpectedWarnings() == null); + try { + testEvaluateBlock(driverContext().blockFactory(), driverContext(), false); + } catch (CircuitBreakingException ex) { + assertThat(ex.getMessage(), equalTo(MockBigArrays.ERROR_MESSAGE)); + assertFalse("Test data is too large to fit in the memory", true); + } } /** @@ -314,7 +320,13 @@ public final void testEvaluateBlockWithoutNulls() { * some null values inserted between. */ public final void testEvaluateBlockWithNulls() { - testEvaluateBlock(driverContext().blockFactory(), driverContext(), true); + assumeTrue("no warning is expected", testCase.getExpectedWarnings() == null); + try { + testEvaluateBlock(driverContext().blockFactory(), driverContext(), true); + } catch (CircuitBreakingException ex) { + assertThat(ex.getMessage(), equalTo(MockBigArrays.ERROR_MESSAGE)); + assertFalse("Test data is too large to fit in the memory", true); + } } /** @@ -355,13 +367,17 @@ protected Matcher allNullsMatcher() { } private void testEvaluateBlock(BlockFactory inputBlockFactory, DriverContext context, boolean insertNulls) { - assumeTrue("can only run on representable types", testCase.allTypesAreRepresentable()); - assumeTrue("must build evaluator to test sending it blocks", testCase.getExpectedTypeError() == null); - boolean readFloating = randomBoolean(); + Expression expression = randomBoolean() ? 
buildDeepCopyOfFieldExpression(testCase) : buildFieldExpression(testCase); + if (testCase.getExpectedTypeError() != null) { + assertTypeResolutionFailure(expression); + return; + } + assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); + assumeTrue("Expected type must be representable to build an evaluator", EsqlDataTypes.isRepresentable(testCase.expectedType())); int positions = between(1, 1024); List data = testCase.getData(); Page onePositionPage = row(testCase.getDataValues()); - Block[] manyPositionsBlocks = new Block[data.size()]; + Block[] manyPositionsBlocks = new Block[Math.toIntExact(data.stream().filter(d -> d.isForceLiteral() == false).count())]; Set nullPositions = insertNulls ? IntStream.range(0, positions).filter(i -> randomBoolean()).mapToObj(Integer::valueOf).collect(Collectors.toSet()) : Set.of(); @@ -369,8 +385,12 @@ private void testEvaluateBlock(BlockFactory inputBlockFactory, DriverContext con nullPositions = Set.of(); } try { - for (int b = 0; b < data.size(); b++) { - ElementType elementType = PlannerUtils.toElementType(data.get(b).type()); + int b = 0; + for (TestCaseSupplier.TypedData d : data) { + if (d.isForceLiteral()) { + continue; + } + ElementType elementType = PlannerUtils.toElementType(d.type()); try (Block.Builder builder = elementType.newBlockBuilder(positions, inputBlockFactory)) { for (int p = 0; p < positions; p++) { if (nullPositions.contains(p)) { @@ -381,9 +401,12 @@ private void testEvaluateBlock(BlockFactory inputBlockFactory, DriverContext con } manyPositionsBlocks[b] = builder.build(); } + b++; } - Expression expression = readFloating ? 
buildDeepCopyOfFieldExpression(testCase) : buildFieldExpression(testCase); - try (ExpressionEvaluator eval = evaluator(expression).get(context); Block block = eval.eval(new Page(manyPositionsBlocks))) { + try ( + ExpressionEvaluator eval = evaluator(expression).get(context); + Block block = eval.eval(new Page(positions, manyPositionsBlocks)) + ) { for (int p = 0; p < positions; p++) { if (nullPositions.contains(p)) { assertThat(toJavaObject(block, p), allNullsMatcher()); @@ -405,13 +428,15 @@ private void testEvaluateBlock(BlockFactory inputBlockFactory, DriverContext con } } - // TODO cranky time - public void testSimpleWithNulls() { // TODO replace this with nulls inserted into the test case like anyNullIsNull - assumeTrue("nothing to do if a type error", testCase.getExpectedTypeError() == null); - assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable()); + Expression expression = buildFieldExpression(testCase); + if (testCase.getExpectedTypeError() != null) { + assertTypeResolutionFailure(expression); + return; + } + assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); List simpleData = testCase.getDataValues(); - try (EvalOperator.ExpressionEvaluator eval = evaluator(buildFieldExpression(testCase)).get(driverContext())) { + try (EvalOperator.ExpressionEvaluator eval = evaluator(expression).get(driverContext())) { BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); Block[] orig = BlockUtils.fromListRow(blockFactory, simpleData); for (int i = 0; i < orig.length; i++) { @@ -450,11 +475,16 @@ protected void assertSimpleWithNulls(List data, Block value, int nullBlo } public final void testEvaluateInManyThreads() throws ExecutionException, InterruptedException { - assumeTrue("nothing to do if a type error", testCase.getExpectedTypeError() == null); - assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable()); + 
Expression expression = buildFieldExpression(testCase); + if (testCase.getExpectedTypeError() != null) { + assertTypeResolutionFailure(expression); + return; + } + assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); + assumeTrue("Expected type must be representable to build an evaluator", EsqlDataTypes.isRepresentable(testCase.expectedType())); int count = 10_000; int threads = 5; - var evalSupplier = evaluator(buildFieldExpression(testCase)); + var evalSupplier = evaluator(expression); ExecutorService exec = Executors.newFixedThreadPool(threads); try { List> futures = new ArrayList<>(); @@ -481,17 +511,25 @@ public final void testEvaluateInManyThreads() throws ExecutionException, Interru } public final void testEvaluatorToString() { - assumeTrue("nothing to do if a type error", testCase.getExpectedTypeError() == null); - assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable()); - var factory = evaluator(buildFieldExpression(testCase)); + Expression expression = buildFieldExpression(testCase); + if (testCase.getExpectedTypeError() != null) { + assertTypeResolutionFailure(expression); + return; + } + assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); + var factory = evaluator(expression); try (ExpressionEvaluator ev = factory.get(driverContext())) { assertThat(ev.toString(), testCase.evaluatorToString()); } } public final void testFactoryToString() { - assumeTrue("nothing to do if a type error", testCase.getExpectedTypeError() == null); - assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable()); + Expression expression = buildFieldExpression(testCase); + if (testCase.getExpectedTypeError() != null) { + assertTypeResolutionFailure(expression); + return; + } + assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); var factory = evaluator(buildFieldExpression(testCase)); assertThat(factory.toString(), 
testCase.evaluatorToString()); } @@ -499,8 +537,7 @@ public final void testFactoryToString() { public final void testFold() { Expression expression = buildLiteralExpression(testCase); if (testCase.getExpectedTypeError() != null) { - assertTrue(expression.typeResolved().unresolved()); - assertThat(expression.typeResolved().message(), equalTo(testCase.getExpectedTypeError())); + assertTypeResolutionFailure(expression); return; } assertFalse(expression.typeResolved().unresolved()); @@ -524,7 +561,6 @@ public final void testFold() { } public void testSerializationOfSimple() { - assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable()); assertSerialization(buildFieldExpression(testCase)); } @@ -594,7 +630,7 @@ protected static List anyNullIsNull(boolean entirelyNullPreser (nullPosition, nullValueDataType, original) -> entirelyNullPreservesType == false && nullValueDataType == DataTypes.NULL && original.getData().size() == 1 ? DataTypes.NULL : original.expectedType(), - (nullPosition, original) -> original + (nullPosition, nullData, original) -> original ); } @@ -603,7 +639,7 @@ public interface ExpectedType { } public interface ExpectedEvaluatorToString { - Matcher evaluatorToString(int nullPosition, Matcher original); + Matcher evaluatorToString(int nullPosition, TestCaseSupplier.TypedData nullData, Matcher original); } protected static List anyNullIsNull( @@ -635,10 +671,11 @@ protected static List anyNullIsNull( TestCaseSupplier.TypedData od = oc.getData().get(i); return i == finalNullPosition ? 
od.forceValueToNull() : od; }).toList(); + TestCaseSupplier.TypedData nulledData = oc.getData().get(finalNullPosition); return new TestCaseSupplier.TestCase( data, - evaluatorToString.evaluatorToString(finalNullPosition, oc.evaluatorToString()), - expectedType.expectedType(finalNullPosition, oc.getData().get(finalNullPosition).type(), oc), + evaluatorToString.evaluatorToString(finalNullPosition, nulledData, oc.evaluatorToString()), + expectedType.expectedType(finalNullPosition, nulledData.type(), oc), nullValue(), null, oc.getExpectedTypeError(), @@ -1092,6 +1129,11 @@ protected static DataType[] representableNonSpatialTypes() { return representableNonSpatial().toArray(DataType[]::new); } + protected final void assertTypeResolutionFailure(Expression expression) { + assertTrue("expected unresolved", expression.typeResolved().unresolved()); + assertThat(expression.typeResolved().message(), equalTo(testCase.getExpectedTypeError())); + } + @AfterClass public static void renderSignature() throws IOException { if (System.getProperty("generateDocs") == null) { @@ -1532,17 +1574,18 @@ private static void writeToTempDir(String subdir, String str, String extension) private final List breakers = Collections.synchronizedList(new ArrayList<>()); protected final DriverContext driverContext() { - MockBigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofGb(1)); + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofMb(256)).withCircuitBreaking(); CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); breakers.add(breaker); - return new DriverContext(bigArrays.withCircuitBreaking(), new BlockFactory(breaker, bigArrays)); + return new DriverContext(bigArrays, new BlockFactory(breaker, bigArrays)); } protected final DriverContext crankyContext() { - BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new 
CrankyCircuitBreakerService()); + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new CrankyCircuitBreakerService()) + .withCircuitBreaking(); CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); breakers.add(breaker); - return new DriverContext(bigArrays.withCircuitBreaking(), new BlockFactory(breaker, bigArrays)); + return new DriverContext(bigArrays, new BlockFactory(breaker, bigArrays)); } @After diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index 7cfe950bb3144..d9261a1658969 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -92,7 +92,7 @@ public static List stringCases( expected, lhsSuppliers, rhsSuppliers, - evaluatorToString, + (lhs, rhs) -> equalTo(evaluatorToString.apply(lhs, rhs)), (lhs, rhs) -> warnings, suppliers, expectedType, @@ -202,16 +202,18 @@ public static List forBinaryCastingToDouble( (l, r) -> expected.apply(((Number) l).doubleValue(), ((Number) r).doubleValue()), lhsSuppliers, rhsSuppliers, - (lhsType, rhsType) -> name - + "[" - + lhsName - + "=" - + castToDoubleEvaluator("Attribute[channel=0]", lhsType) - + ", " - + rhsName - + "=" - + castToDoubleEvaluator("Attribute[channel=1]", rhsType) - + "]", + (lhsType, rhsType) -> equalTo( + name + + "[" + + lhsName + + "=" + + castToDoubleEvaluator("Attribute[channel=0]", lhsType) + + ", " + + rhsName + + "=" + + castToDoubleEvaluator("Attribute[channel=1]", rhsType) + + "]" + ), (lhs, rhs) -> warnings, suppliers, DataTypes.DOUBLE, @@ -224,7 +226,7 @@ public static void casesCrossProduct( BinaryOperator expected, List lhsSuppliers, List rhsSuppliers, - BiFunction evaluatorToString, 
+ BiFunction> evaluatorToString, BiFunction> warnings, List suppliers, DataType expectedType, @@ -243,7 +245,7 @@ public static void casesCrossProduct( public static TestCaseSupplier testCaseSupplier( TypedDataSupplier lhsSupplier, TypedDataSupplier rhsSupplier, - BiFunction evaluatorToString, + BiFunction> evaluatorToString, DataType expectedType, BinaryOperator expectedValue ) { @@ -253,7 +255,7 @@ public static TestCaseSupplier testCaseSupplier( private static TestCaseSupplier testCaseSupplier( TypedDataSupplier lhsSupplier, TypedDataSupplier rhsSupplier, - BiFunction evaluatorToString, + BiFunction> evaluatorToString, DataType expectedType, BinaryOperator expectedValue, BiFunction> warnings @@ -366,7 +368,7 @@ public static List forBinaryComparisonWithWidening( (l, r) -> expectedTypeStuff.expected().apply((Number) l, (Number) r), getSuppliersForNumericType(lhsType, expectedTypeStuff.min(), expectedTypeStuff.max(), allowRhsZero), getSuppliersForNumericType(rhsType, expectedTypeStuff.min(), expectedTypeStuff.max(), allowRhsZero), - evaluatorToString, + (lhs, rhs) -> equalTo(evaluatorToString.apply(lhs, rhs)), warnings, suppliers, DataTypes.BOOLEAN, @@ -391,16 +393,18 @@ public static List forBinaryWithWidening( for (DataType rhsType : numericTypes) { DataType expected = widen(lhsType, rhsType); NumericTypeTestConfig expectedTypeStuff = typeStuff.get(expected); - BiFunction evaluatorToString = (lhs, rhs) -> expectedTypeStuff.evaluatorName() - + "[" - + lhsName - + "=" - + getCastEvaluator("Attribute[channel=0]", lhs, expected) - + ", " - + rhsName - + "=" - + getCastEvaluator("Attribute[channel=1]", rhs, expected) - + "]"; + BiFunction> evaluatorToString = (lhs, rhs) -> equalTo( + expectedTypeStuff.evaluatorName() + + "[" + + lhsName + + "=" + + getCastEvaluator("Attribute[channel=0]", lhs, expected) + + ", " + + rhsName + + "=" + + getCastEvaluator("Attribute[channel=1]", rhs, expected) + + "]" + ); casesCrossProduct( (l, r) -> 
expectedTypeStuff.expected().apply((Number) l, (Number) r), getSuppliersForNumericType(lhsType, expectedTypeStuff.min(), expectedTypeStuff.max(), true), @@ -429,26 +433,22 @@ public static List forBinaryNotCasting( boolean symmetric ) { return forBinaryNotCasting( - name, - lhsName, - rhsName, expected, expectedType, lhsSuppliers, rhsSuppliers, + equalTo(name + "[" + lhsName + "=Attribute[channel=0], " + rhsName + "=Attribute[channel=1]]"), (lhs, rhs) -> warnings, symmetric ); } public static List forBinaryNotCasting( - String name, - String lhsName, - String rhsName, BinaryOperator expected, DataType expectedType, List lhsSuppliers, List rhsSuppliers, + Matcher evaluatorToString, BiFunction> warnings, boolean symmetric ) { @@ -457,7 +457,7 @@ public static List forBinaryNotCasting( expected, lhsSuppliers, rhsSuppliers, - (lhsType, rhsType) -> name + "[" + lhsName + "=Attribute[channel=0], " + rhsName + "=Attribute[channel=1]]", + (lhsType, rhsType) -> evaluatorToString, warnings, suppliers, expectedType, @@ -1006,7 +1006,7 @@ public static List dateCases() { public static List datePeriodCases() { return List.of( - new TypedDataSupplier("", () -> Period.ZERO, EsqlDataTypes.DATE_PERIOD), + new TypedDataSupplier("", () -> Period.ZERO, EsqlDataTypes.DATE_PERIOD, true), new TypedDataSupplier( "", () -> Period.of( @@ -1014,18 +1014,20 @@ public static List datePeriodCases() { ESTestCase.randomIntBetween(-13, 13), ESTestCase.randomIntBetween(-32, 32) ), - EsqlDataTypes.DATE_PERIOD + EsqlDataTypes.DATE_PERIOD, + true ) ); } public static List timeDurationCases() { return List.of( - new TypedDataSupplier("", () -> Duration.ZERO, EsqlDataTypes.TIME_DURATION), + new TypedDataSupplier("", () -> Duration.ZERO, EsqlDataTypes.TIME_DURATION, true), new TypedDataSupplier( "", () -> Duration.ofMillis(ESTestCase.randomLongBetween(-604800000L, 604800000L)), // plus/minus 7 days - EsqlDataTypes.TIME_DURATION + EsqlDataTypes.TIME_DURATION, + true ) ); } @@ -1237,7 +1239,7 @@ public 
static class TestCase { private final String[] expectedWarnings; private final String expectedTypeError; - private final boolean allTypesAreRepresentable; + private final boolean canBuildEvaluator; private final Class foldingExceptionClass; private final String foldingExceptionMessage; @@ -1271,7 +1273,7 @@ public static TestCase typeError(List data, String expectedTypeError) this.matcher = matcher; this.expectedWarnings = expectedWarnings; this.expectedTypeError = expectedTypeError; - this.allTypesAreRepresentable = data.stream().allMatch(d -> EsqlDataTypes.isRepresentable(d.type)); + this.canBuildEvaluator = data.stream().allMatch(d -> d.forceLiteral || EsqlDataTypes.isRepresentable(d.type)); this.foldingExceptionClass = foldingExceptionClass; this.foldingExceptionMessage = foldingExceptionMessage; } @@ -1297,11 +1299,11 @@ public List getDataAsLiterals() { } public List getDataValues() { - return data.stream().map(t -> t.data()).collect(Collectors.toList()); + return data.stream().filter(d -> d.forceLiteral == false).map(TypedData::data).collect(Collectors.toList()); } - public boolean allTypesAreRepresentable() { - return allTypesAreRepresentable; + public boolean canBuildEvaluator() { + return canBuildEvaluator; } public Matcher getMatcher() { @@ -1428,6 +1430,13 @@ public TypedData forceLiteral() { return new TypedData(data, type, name, true); } + /** + * Has this been forced to a {@link Literal}. + */ + public boolean isForceLiteral() { + return forceLiteral; + } + /** * Return a {@link TypedData} that always returns {@code null} for it's * value without modifying anything else in the supplier. 
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java index 90692d5b19df1..ee23cf00a37a0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; +import java.math.BigInteger; import java.util.List; import java.util.function.Function; import java.util.function.Supplier; @@ -32,6 +33,7 @@ import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; public class CaseTests extends AbstractFunctionTestCase { @@ -44,26 +46,173 @@ public CaseTests(@Name("TestCase") Supplier testCaseS */ @ParametersFactory public static Iterable parameters() { - return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("basics", () -> { - List typedData = List.of( - new TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), - new TestCaseSupplier.TypedData(new BytesRef("a"), DataTypes.KEYWORD, "a"), - new TestCaseSupplier.TypedData(new BytesRef("b"), DataTypes.KEYWORD, "b") - ); - return new TestCaseSupplier.TestCase( - typedData, - "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " - + "value=Attribute[channel=1]]], elseVal=Attribute[channel=2]]", - DataTypes.KEYWORD, - equalTo(new BytesRef("a")) - ); - }))); + return parameterSuppliersFromTypedData( + List.of(new TestCaseSupplier("keyword", List.of(DataTypes.BOOLEAN, DataTypes.KEYWORD, DataTypes.KEYWORD), () -> { + List typedData = List.of( + new 
TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(new BytesRef("a"), DataTypes.KEYWORD, "a"), + new TestCaseSupplier.TypedData(new BytesRef("b"), DataTypes.KEYWORD, "b") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=Attribute[channel=2]]", + DataTypes.KEYWORD, + equalTo(new BytesRef("a")) + ); + }), new TestCaseSupplier("text", List.of(DataTypes.BOOLEAN, DataTypes.TEXT), () -> { + List typedData = List.of( + new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(new BytesRef("a"), DataTypes.TEXT, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.TEXT, + nullValue() + ); + }), new TestCaseSupplier("boolean", List.of(DataTypes.BOOLEAN, DataTypes.BOOLEAN), () -> { + List typedData = List.of( + new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=BOOLEAN, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.BOOLEAN, + nullValue() + ); + }), new TestCaseSupplier("date", List.of(DataTypes.BOOLEAN, DataTypes.DATETIME), () -> { + long value = randomNonNegativeLong(); + List typedData = List.of( + new TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataTypes.DATETIME, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=LONG, 
conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.DATETIME, + equalTo(value) + ); + }), new TestCaseSupplier("double", List.of(DataTypes.BOOLEAN, DataTypes.DOUBLE), () -> { + double value = randomDouble(); + List typedData = List.of( + new TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataTypes.DOUBLE, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=DOUBLE, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.DOUBLE, + equalTo(value) + ); + }), new TestCaseSupplier("integer", List.of(DataTypes.BOOLEAN, DataTypes.INTEGER), () -> { + int value = randomInt(); + List typedData = List.of( + new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataTypes.INTEGER, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=INT, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.INTEGER, + nullValue() + ); + }), new TestCaseSupplier("long", List.of(DataTypes.BOOLEAN, DataTypes.LONG), () -> { + long value = randomLong(); + List typedData = List.of( + new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataTypes.LONG, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=LONG, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.LONG, + nullValue() + ); + }), new TestCaseSupplier("unsigned_long", List.of(DataTypes.BOOLEAN, DataTypes.UNSIGNED_LONG), () -> { + BigInteger value = 
randomUnsignedLongBetween(BigInteger.ZERO, UNSIGNED_LONG_MAX); + List typedData = List.of( + new TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataTypes.UNSIGNED_LONG, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=LONG, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.UNSIGNED_LONG, + equalTo(value) + ); + }), new TestCaseSupplier("ip", List.of(DataTypes.BOOLEAN, DataTypes.IP), () -> { + BytesRef value = (BytesRef) randomLiteral(DataTypes.IP).value(); + List typedData = List.of( + new TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataTypes.IP, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.IP, + equalTo(value) + ); + }), new TestCaseSupplier("version", List.of(DataTypes.BOOLEAN, DataTypes.VERSION), () -> { + BytesRef value = (BytesRef) randomLiteral(DataTypes.VERSION).value(); + List typedData = List.of( + new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataTypes.VERSION, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.VERSION, + nullValue() + ); + }), new TestCaseSupplier("cartesian_point", List.of(DataTypes.BOOLEAN, EsqlDataTypes.CARTESIAN_POINT), () -> { + BytesRef value = (BytesRef) randomLiteral(EsqlDataTypes.CARTESIAN_POINT).value(); + List typedData = List.of( + new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, 
"cond"), + new TestCaseSupplier.TypedData(value, EsqlDataTypes.CARTESIAN_POINT, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + EsqlDataTypes.CARTESIAN_POINT, + nullValue() + ); + }), new TestCaseSupplier("geo_point", List.of(DataTypes.BOOLEAN, EsqlDataTypes.GEO_POINT), () -> { + BytesRef value = (BytesRef) randomLiteral(EsqlDataTypes.GEO_POINT).value(); + List typedData = List.of( + new TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, EsqlDataTypes.GEO_POINT, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + EsqlDataTypes.GEO_POINT, + equalTo(value) + ); + })) + ); } @Override protected void assertSimpleWithNulls(List data, Block value, int nullBlock) { if (nullBlock == 0) { - assertThat(toJavaObject(value, 0), equalTo(data.get(2))); + if (data.size() == 2) { + assertThat(value.isNull(0), equalTo(true)); + } else if (data.size() > 2) { + assertThat(toJavaObject(value, 0), equalTo(data.get(2))); + } return; } if (((Boolean) data.get(0)).booleanValue()) { @@ -77,7 +226,11 @@ protected void assertSimpleWithNulls(List data, Block value, int nullBlo if (nullBlock == 2) { super.assertSimpleWithNulls(data, value, nullBlock); } else { - assertThat(toJavaObject(value, 0), equalTo(data.get(2))); + if (data.size() > 2) { + assertThat(toJavaObject(value, 0), equalTo(data.get(2))); + } else { + super.assertSimpleWithNulls(data, value, nullBlock); + } } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/BucketTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/BucketTests.java index cc2714dc31dca..a73b4a0dfa557 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/BucketTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/BucketTests.java @@ -46,14 +46,16 @@ public static Iterable parameters() { "fixed date with period", () -> DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2023-01-01T00:00:00.00Z"), EsqlDataTypes.DATE_PERIOD, - Period.ofYears(1) + Period.ofYears(1), + "[YEAR_OF_CENTURY in Z][fixed to midnight]" ); dateCasesWithSpan( suppliers, "fixed date with duration", () -> DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2023-02-17T09:00:00.00Z"), EsqlDataTypes.TIME_DURATION, - Duration.ofDays(1L) + Duration.ofDays(1L), + "[86400000 in Z][fixed]" ); numberCases(suppliers, "fixed long", DataTypes.LONG, () -> 100L); numberCasesWithSpan(suppliers, "fixed long with span", DataTypes.LONG, () -> 100L); @@ -68,7 +70,7 @@ public static Iterable parameters() { (nullPosition, nullValueDataType, original) -> nullPosition == 0 && nullValueDataType == DataTypes.NULL ? DataTypes.NULL : original.expectedType(), - (nullPosition, original) -> nullPosition == 0 ? original : equalTo("LiteralsEvaluator[lit=null]") + (nullPosition, nullData, original) -> nullPosition == 0 ? 
original : equalTo("LiteralsEvaluator[lit=null]") ) ); } @@ -112,7 +114,8 @@ private static void dateCasesWithSpan( String name, LongSupplier date, DataType spanType, - Object span + Object span, + String spanStr ) { suppliers.add(new TestCaseSupplier(name, List.of(DataTypes.DATETIME, spanType), () -> { List args = new ArrayList<>(); @@ -120,7 +123,7 @@ private static void dateCasesWithSpan( args.add(new TestCaseSupplier.TypedData(span, spanType, "buckets").forceLiteral()); return new TestCaseSupplier.TestCase( args, - "DateTruncEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding[DAY_OF_MONTH in Z][fixed to midnight]]", + "DateTruncEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding" + spanStr + "]", DataTypes.DATETIME, dateResultsMatcher(args) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/NowTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/NowTests.java new file mode 100644 index 0000000000000..b4f195c5929e3 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/NowTests.java @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.xpack.esql.EsqlTestUtils; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractConfigurationFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.scalar.date.Now; +import org.elasticsearch.xpack.esql.session.EsqlConfiguration; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.matchesPattern; + +public class NowTests extends AbstractConfigurationFunctionTestCase { + public NowTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + return parameterSuppliersFromTypedData( + List.of( + new TestCaseSupplier( + "Now Test", + () -> new TestCaseSupplier.TestCase( + List.of(), + matchesPattern("LiteralsEvaluator\\[lit=.*\\]"), + DataTypes.DATETIME, + equalTo(EsqlTestUtils.TEST_CFG.now().toInstant().toEpochMilli()) + ) + ) + ) + ); + } + + @Override + protected Expression buildWithConfiguration(Source source, List args, EsqlConfiguration configuration) { + return new Now(Source.EMPTY, configuration); + } + + @Override + protected void assertSimpleWithNulls(List data, Block value, int nullBlock) { + assertThat(((LongBlock) value).asVector().getLong(0), equalTo(EsqlTestUtils.TEST_CFG.now().toInstant().toEpochMilli())); + } + + @Override + 
protected Matcher allNullsMatcher() { + return equalTo(EsqlTestUtils.TEST_CFG.now().toInstant().toEpochMilli()); + } + +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java index 30460828aaa91..097f3c1038cfb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java @@ -64,7 +64,7 @@ public static Iterable parameters() { suppliers = anyNullIsNull( suppliers, (nullPosition, nullValueDataType, original) -> nullPosition == 0 ? nullValueDataType : original.expectedType(), - (nullPosition, original) -> original + (nullPosition, nullData, original) -> original ); suppliers.add(new TestCaseSupplier("two doubles", List.of(DataTypes.DOUBLE, DataTypes.INTEGER), () -> { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceTests.java index 260813bacb8f2..bf16344847bde 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceTests.java @@ -48,7 +48,7 @@ public static Iterable parameters() { (nullPosition, nullValueDataType, original) -> nullPosition == 0 && nullValueDataType == DataTypes.NULL ? 
DataTypes.NULL : original.expectedType(), - (nullPosition, original) -> original + (nullPosition, nullData, original) -> original ) ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunctionTestCase.java index e905f85141f31..64e03dec6b064 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunctionTestCase.java @@ -16,6 +16,7 @@ import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; import org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes; +import org.hamcrest.Matcher; import java.io.IOException; import java.lang.reflect.Field; @@ -28,6 +29,7 @@ import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isSpatial; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isSpatialGeo; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isString; +import static org.hamcrest.Matchers.equalTo; public abstract class SpatialRelatesFunctionTestCase extends AbstractFunctionTestCase { @@ -188,11 +190,11 @@ private static DataType pickSpatialType(DataType leftType, DataType rightType) { } } - private static String spatialEvaluatorString(DataType leftType, DataType rightType) { + private static Matcher spatialEvaluatorString(DataType leftType, DataType rightType) { String crsType = isSpatialGeo(pickSpatialType(leftType, rightType)) ? 
"Geo" : "Cartesian"; - return getFunctionClassName() - + crsType - + "SourceAndSourceEvaluator[leftValue=Attribute[channel=0], rightValue=Attribute[channel=1]]"; + return equalTo( + getFunctionClassName() + crsType + "SourceAndSourceEvaluator[leftValue=Attribute[channel=0], rightValue=Attribute[channel=1]]" + ); } private static int countGeo(DataType... types) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java index 2daf2688d6631..25ccd91f43d07 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java @@ -33,8 +33,10 @@ import static org.elasticsearch.xpack.ql.type.DateUtils.asDateTime; import static org.elasticsearch.xpack.ql.type.DateUtils.asMillis; import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.startsWith; public class AddTests extends AbstractFunctionTestCase { public AddTests(@Name("TestCase") Supplier testCaseSupplier) { @@ -94,27 +96,23 @@ public static Iterable parameters() { suppliers.addAll( TestCaseSupplier.forBinaryNotCasting( - "No evaluator, the tests only trigger the folding code since Period is not representable", - "lhs", - "rhs", (lhs, rhs) -> ((Period) lhs).plus((Period) rhs), EsqlDataTypes.DATE_PERIOD, TestCaseSupplier.datePeriodCases(), TestCaseSupplier.datePeriodCases(), - List.of(), + startsWith("LiteralsEvaluator[lit="), // lhs and rhs have to be literals, so we fold into a literal + (lhs, rhs) -> List.of(), true ) ); suppliers.addAll( 
TestCaseSupplier.forBinaryNotCasting( - "No evaluator, the tests only trigger the folding code since Duration is not representable", - "lhs", - "rhs", (lhs, rhs) -> ((Duration) lhs).plus((Duration) rhs), EsqlDataTypes.TIME_DURATION, TestCaseSupplier.timeDurationCases(), TestCaseSupplier.timeDurationCases(), - List.of(), + startsWith("LiteralsEvaluator[lit="), // lhs and rhs have to be literals, so we fold into a literal + (lhs, rhs) -> List.of(), true ) ); @@ -139,28 +137,22 @@ public static Iterable parameters() { }; suppliers.addAll( TestCaseSupplier.forBinaryNotCasting( - // TODO: There is an evaluator for Datetime + Period, so it should be tested. Similarly below. - "No evaluator, the tests only trigger the folding code since Period is not representable", - "lhs", - "rhs", result, DataTypes.DATETIME, TestCaseSupplier.dateCases(), TestCaseSupplier.datePeriodCases(), + startsWith("AddDatetimesEvaluator[datetime=Attribute[channel=0], temporalAmount="), warnings, true ) ); suppliers.addAll( TestCaseSupplier.forBinaryNotCasting( - // TODO: There is an evaluator for Datetime + Duration, so it should be tested. Similarly above. - "No evaluator, the tests only trigger the folding code since Duration is not representable", - "lhs", - "rhs", result, DataTypes.DATETIME, TestCaseSupplier.dateCases(), TestCaseSupplier.timeDurationCases(), + startsWith("AddDatetimesEvaluator[datetime=Attribute[channel=0], temporalAmount="), warnings, true ) @@ -192,7 +184,12 @@ public static Iterable parameters() { // Datetime tests are split in two, depending on their permissiveness of null-injection, which cannot happen "automatically" for // Datetime + Period/Duration, since the expression will take the non-null arg's type. 
- suppliers = errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), AddTests::addErrorMessageString); + suppliers = anyNullIsNull( + suppliers, + (nullPosition, nullType, original) -> original.expectedType(), + (nullPosition, nullData, original) -> nullData.isForceLiteral() ? equalTo("LiteralsEvaluator[lit=null]") : original + ); + suppliers = errorsForCasesWithoutExamples(suppliers, AddTests::addErrorMessageString); // Cases that should generate warnings suppliers.addAll(List.of(new TestCaseSupplier("MV", () -> { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java index f3348ab2dcba5..eb29a7b5ce06e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; +import org.hamcrest.Matcher; import java.math.BigInteger; import java.util.ArrayList; @@ -24,6 +25,8 @@ import java.util.function.BiFunction; import java.util.function.Supplier; +import static org.hamcrest.Matchers.equalTo; + public class DivTests extends AbstractFunctionTestCase { public DivTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); @@ -116,16 +119,18 @@ public static Iterable parameters() { for (DataType rhsType : numericTypes) { DataType expected = TestCaseSupplier.widen(lhsType, rhsType); TestCaseSupplier.NumericTypeTestConfig expectedTypeStuff = typeStuff.get(expected); - BiFunction evaluatorToString = (lhs, rhs) -> expectedTypeStuff.evaluatorName() - + "[" - + "lhs" - + "=" - + 
TestCaseSupplier.getCastEvaluator("Attribute[channel=0]", lhs, expected) - + ", " - + "rhs" - + "=" - + TestCaseSupplier.getCastEvaluator("Attribute[channel=1]", rhs, expected) - + "]"; + BiFunction> evaluatorToString = (lhs, rhs) -> equalTo( + expectedTypeStuff.evaluatorName() + + "[" + + "lhs" + + "=" + + TestCaseSupplier.getCastEvaluator("Attribute[channel=0]", lhs, expected) + + ", " + + "rhs" + + "=" + + TestCaseSupplier.getCastEvaluator("Attribute[channel=1]", rhs, expected) + + "]" + ); TestCaseSupplier.casesCrossProduct( (l1, r1) -> expectedTypeStuff.expected().apply((Number) l1, (Number) r1), TestCaseSupplier.getSuppliersForNumericType(lhsType, expectedTypeStuff.min(), expectedTypeStuff.max(), true), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java index a70f2c7885257..bc6d6dd97c3ef 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; +import org.hamcrest.Matcher; import java.math.BigInteger; import java.util.ArrayList; @@ -24,6 +25,8 @@ import java.util.function.BiFunction; import java.util.function.Supplier; +import static org.hamcrest.Matchers.equalTo; + public class ModTests extends AbstractFunctionTestCase { public ModTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); @@ -103,16 +106,18 @@ public static Iterable parameters() { for (DataType rhsType : numericTypes) { DataType expected = TestCaseSupplier.widen(lhsType, rhsType); 
TestCaseSupplier.NumericTypeTestConfig expectedTypeStuff = typeStuff.get(expected); - BiFunction evaluatorToString = (lhs, rhs) -> expectedTypeStuff.evaluatorName() - + "[" - + "lhs" - + "=" - + TestCaseSupplier.getCastEvaluator("Attribute[channel=0]", lhs, expected) - + ", " - + "rhs" - + "=" - + TestCaseSupplier.getCastEvaluator("Attribute[channel=1]", rhs, expected) - + "]"; + BiFunction> evaluatorToString = (lhs, rhs) -> equalTo( + expectedTypeStuff.evaluatorName() + + "[" + + "lhs" + + "=" + + TestCaseSupplier.getCastEvaluator("Attribute[channel=0]", lhs, expected) + + ", " + + "rhs" + + "=" + + TestCaseSupplier.getCastEvaluator("Attribute[channel=1]", rhs, expected) + + "]" + ); TestCaseSupplier.casesCrossProduct( (l1, r1) -> expectedTypeStuff.expected().apply((Number) l1, (Number) r1), TestCaseSupplier.getSuppliersForNumericType(lhsType, expectedTypeStuff.min(), expectedTypeStuff.max(), true), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java index f5e5e9f406f22..c65f4eed2de70 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java @@ -155,7 +155,7 @@ public void testEdgeCases() { } private Object process(Object val) { - if (testCase.allTypesAreRepresentable()) { + if (testCase.canBuildEvaluator()) { Neg neg = new Neg(Source.EMPTY, field("val", typeOf(val))); try (Block block = evaluator(neg).get(driverContext()).eval(row(List.of(val)))) { return toJavaObject(block, 0); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java index 23f9e93ac72a6..cfa3b4a8ea6ae 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java @@ -207,7 +207,7 @@ public void testWrappedStreamSimple() throws IOException { // write BytesStreamOutput bso = new BytesStreamOutput(); bso.writeString("hello"); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); var plan = new RowExec(Source.EMPTY, List.of(new Alias(Source.EMPTY, "foo", field("field", DataTypes.LONG)))); out.writePhysicalPlanNode(plan); bso.writeVInt(11_345); @@ -230,7 +230,7 @@ public void testUnsupportedAttributeSimple() throws IOException { new NameId() ); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeUnsupportedAttr(out, orig); var in = planStreamInput(bso); var deser = PlanNamedTypes.readUnsupportedAttr(in); @@ -255,7 +255,7 @@ public void testFieldAttributeSimple() throws IOException { true // synthetic ); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeFieldAttribute(out, orig); var in = planStreamInput(bso); var deser = PlanNamedTypes.readFieldAttribute(in); @@ -277,7 +277,7 @@ public void testKeywordEsFieldSimple() throws IOException { true // alias ); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); 
PlanNamedTypes.writeKeywordEsField(out, orig); var deser = PlanNamedTypes.readKeywordEsField(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -295,7 +295,7 @@ public void testTextdEsFieldSimple() throws IOException { true // alias ); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeTextEsField(out, orig); var deser = PlanNamedTypes.readTextEsField(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -308,7 +308,7 @@ public void testTextEsField() { public void testInvalidMappedFieldSimple() throws IOException { var orig = new InvalidMappedField("foo", "bar"); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeInvalidMappedField(out, orig); var deser = PlanNamedTypes.readInvalidMappedField(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -321,7 +321,7 @@ public void testInvalidMappedField() { public void testEsDateFieldSimple() throws IOException { var orig = DateEsField.dateEsField("birth_date", Map.of(), false); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeDateEsField(out, orig); var deser = PlanNamedTypes.readDateEsField(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -330,7 +330,7 @@ public void testEsDateFieldSimple() throws IOException { public void testBinComparisonSimple() throws IOException { var orig = new Equals(Source.EMPTY, field("foo", DataTypes.DOUBLE), field("bar", 
DataTypes.DOUBLE)); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); out.writeNamed(EsqlBinaryComparison.class, orig); var deser = (Equals) planStreamInput(bso).readNamed(EsqlBinaryComparison.class); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -345,7 +345,7 @@ public void testBinComparison() { public void testAggFunctionSimple() throws IOException { var orig = new Avg(Source.EMPTY, field("foo_val", DataTypes.DOUBLE)); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); out.writeNamed(AggregateFunction.class, orig); var deser = (Avg) planStreamInput(bso).readNamed(AggregateFunction.class); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -358,7 +358,7 @@ public void testAggFunction() { public void testArithmeticOperationSimple() throws IOException { var orig = new Add(Source.EMPTY, field("foo", DataTypes.LONG), field("bar", DataTypes.LONG)); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); out.writeNamed(ArithmeticOperation.class, orig); var deser = (Add) planStreamInput(bso).readNamed(ArithmeticOperation.class); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -373,7 +373,7 @@ public void testArithmeticOperation() { public void testSubStringSimple() throws IOException { var orig = new Substring(Source.EMPTY, field("foo", DataTypes.KEYWORD), new Literal(Source.EMPTY, 1, DataTypes.INTEGER), null); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new 
PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeSubstring(out, orig); var deser = PlanNamedTypes.readSubstring(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -382,7 +382,7 @@ public void testSubStringSimple() throws IOException { public void testStartsWithSimple() throws IOException { var orig = new StartsWith(Source.EMPTY, field("foo", DataTypes.KEYWORD), new Literal(Source.EMPTY, "fo", DataTypes.KEYWORD)); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeStartsWith(out, orig); var deser = PlanNamedTypes.readStartsWith(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -391,7 +391,7 @@ public void testStartsWithSimple() throws IOException { public void testRoundSimple() throws IOException { var orig = new Round(Source.EMPTY, field("value", DataTypes.DOUBLE), new Literal(Source.EMPTY, 1, DataTypes.INTEGER)); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeRound(out, orig); var deser = PlanNamedTypes.readRound(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -400,7 +400,7 @@ public void testRoundSimple() throws IOException { public void testPowSimple() throws IOException { var orig = new Pow(Source.EMPTY, field("value", DataTypes.DOUBLE), new Literal(Source.EMPTY, 1, DataTypes.INTEGER)); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writePow(out, orig); var deser = PlanNamedTypes.readPow(planStreamInput(bso)); 
EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -409,7 +409,7 @@ public void testPowSimple() throws IOException { public void testAliasSimple() throws IOException { var orig = new Alias(Source.EMPTY, "alias_name", field("a", DataTypes.LONG)); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeAlias(out, orig); var in = planStreamInput(bso); var deser = PlanNamedTypes.readAlias(in); @@ -420,7 +420,7 @@ public void testAliasSimple() throws IOException { public void testLiteralSimple() throws IOException { var orig = new Literal(Source.EMPTY, 1, DataTypes.INTEGER); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeLiteral(out, orig); var deser = PlanNamedTypes.readLiteral(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -429,7 +429,7 @@ public void testLiteralSimple() throws IOException { public void testOrderSimple() throws IOException { var orig = new Order(Source.EMPTY, field("val", DataTypes.INTEGER), Order.OrderDirection.ASC, Order.NullsPosition.FIRST); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeOrder(out, orig); var deser = (Order) PlanNamedTypes.readOrder(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -438,7 +438,7 @@ public void testOrderSimple() throws IOException { public void testFieldSortSimple() throws IOException { var orig = new EsQueryExec.FieldSort(field("val", DataTypes.LONG), Order.OrderDirection.ASC, Order.NullsPosition.FIRST); 
BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeFieldSort(out, orig); var deser = PlanNamedTypes.readFieldSort(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -447,7 +447,7 @@ public void testFieldSortSimple() throws IOException { public void testEsIndexSimple() throws IOException { var orig = new EsIndex("test*", Map.of("first_name", new KeywordEsField("first_name")), Set.of("test1", "test2")); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeEsIndex(out, orig); var deser = PlanNamedTypes.readEsIndex(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -457,7 +457,7 @@ public void testDissectParserSimple() throws IOException { String pattern = "%{b} %{c}"; var orig = new Dissect.Parser(pattern, ",", new DissectParser(pattern, ",")); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeDissectParser(out, orig); var deser = PlanNamedTypes.readDissectParser(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -466,7 +466,7 @@ public void testDissectParserSimple() throws IOException { public void testEsRelation() throws IOException { var orig = new EsRelation(Source.EMPTY, randomEsIndex(), List.of(randomFieldAttribute()), randomEsSourceOptions(), randomBoolean()); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, 
null); PlanNamedTypes.writeEsRelation(out, orig); var deser = PlanNamedTypes.readEsRelation(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -479,7 +479,7 @@ public void testEsqlProject() throws IOException { List.of(randomFieldAttribute()) ); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeEsqlProject(out, orig); var deser = PlanNamedTypes.readEsqlProject(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -495,7 +495,7 @@ public void testMvExpand() throws IOException { ); var orig = new MvExpand(Source.EMPTY, esRelation, randomFieldAttribute(), randomFieldAttribute()); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeMvExpand(out, orig); var deser = PlanNamedTypes.readMvExpand(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java index 7f683e8f8003b..bc69b4454df81 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java @@ -8,23 +8,130 @@ package org.elasticsearch.xpack.esql.io.stream; import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import 
org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TransportVersionUtils; +import org.elasticsearch.xpack.esql.Column; +import org.elasticsearch.xpack.esql.session.EsqlConfiguration; +import org.elasticsearch.xpack.esql.session.EsqlConfigurationSerializationTests; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.io.IOException; +import java.util.Map; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.lessThan; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.sameInstance; public class PlanStreamOutputTests extends ESTestCase { - public void testTransportVersion() { + public void testTransportVersion() throws IOException { BytesStreamOutput out = new BytesStreamOutput(); TransportVersion v1 = TransportVersionUtils.randomCompatibleVersion(random()); out.setTransportVersion(v1); - PlanStreamOutput planOut = new PlanStreamOutput(out, PlanNameRegistry.INSTANCE); + PlanStreamOutput planOut = new PlanStreamOutput( + out, + PlanNameRegistry.INSTANCE, + randomBoolean() ? 
null : EsqlConfigurationSerializationTests.randomConfiguration() + ); assertThat(planOut.getTransportVersion(), equalTo(v1)); TransportVersion v2 = TransportVersionUtils.randomCompatibleVersion(random()); planOut.setTransportVersion(v2); assertThat(planOut.getTransportVersion(), equalTo(v2)); assertThat(out.getTransportVersion(), equalTo(v2)); } + + public void testWriteBlockFromConfig() throws IOException { + String tableName = randomAlphaOfLength(5); + String columnName = randomAlphaOfLength(10); + try (Column c = randomColumn()) { + EsqlConfiguration configuration = randomConfiguration(Map.of(tableName, Map.of(columnName, c))); + try ( + BytesStreamOutput out = new BytesStreamOutput(); + PlanStreamOutput planStream = new PlanStreamOutput(out, PlanNameRegistry.INSTANCE, configuration) + ) { + planStream.writeCachedBlock(c.values()); + assertThat(out.bytes().length(), equalTo(3 + tableName.length() + columnName.length())); + try ( + PlanStreamInput in = new PlanStreamInput(out.bytes().streamInput(), PlanNameRegistry.INSTANCE, REGISTRY, configuration) + ) { + assertThat(in.readCachedBlock(), sameInstance(c.values())); + } + } + } + } + + public void testWriteBlockOnce() throws IOException { + try (Block b = randomColumn().values()) { + EsqlConfiguration configuration = EsqlConfigurationSerializationTests.randomConfiguration(); + try ( + BytesStreamOutput out = new BytesStreamOutput(); + PlanStreamOutput planStream = new PlanStreamOutput(out, PlanNameRegistry.INSTANCE, configuration) + ) { + planStream.writeCachedBlock(b); + assertThat(out.bytes().length(), greaterThan(4 * LEN)); + assertThat(out.bytes().length(), lessThan(8 * LEN)); + try ( + PlanStreamInput in = new PlanStreamInput(out.bytes().streamInput(), PlanNameRegistry.INSTANCE, REGISTRY, configuration) + ) { + Block read = in.readCachedBlock(); + assertThat(read, not(sameInstance(b))); + assertThat(read, equalTo(b)); + } + } + } + } + + public void testWriteBlockTwice() throws IOException { + try (Block b = 
randomColumn().values()) { + EsqlConfiguration configuration = EsqlConfigurationSerializationTests.randomConfiguration(); + try ( + BytesStreamOutput out = new BytesStreamOutput(); + PlanStreamOutput planStream = new PlanStreamOutput(out, PlanNameRegistry.INSTANCE, configuration) + ) { + planStream.writeCachedBlock(b); + planStream.writeCachedBlock(b); + assertThat(out.bytes().length(), greaterThan(4 * LEN)); + assertThat(out.bytes().length(), lessThan(8 * LEN)); + try ( + PlanStreamInput in = new PlanStreamInput(out.bytes().streamInput(), PlanNameRegistry.INSTANCE, REGISTRY, configuration) + ) { + Block read = in.readCachedBlock(); + assertThat(read, not(sameInstance(b))); + assertThat(read, equalTo(b)); + assertThat(in.readCachedBlock(), sameInstance(read)); + } + } + } + } + + private EsqlConfiguration randomConfiguration(Map> tables) { + return EsqlConfigurationSerializationTests.randomConfiguration("query_" + randomAlphaOfLength(1), tables); + } + + private static final int LEN = 10000; + + private Column randomColumn() { + try (IntBlock.Builder ints = BLOCK_FACTORY.newIntBlockBuilder(LEN)) { + for (int i = 0; i < LEN; i++) { + ints.appendInt(randomInt()); + } + return new Column(DataTypes.INTEGER, ints.build()); + } + } + + private static final BlockFactory BLOCK_FACTORY = BlockFactory.getInstance( + new NoopCircuitBreaker("noop-esql-breaker"), + BigArrays.NON_RECYCLING_INSTANCE + ); + + private static final NamedWriteableRegistry REGISTRY = new NamedWriteableRegistry(Block.getNamedWriteables()); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java index a0f226946cc36..9157f186ade92 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java @@ -380,14 +380,18 @@ public void 
testDurationLiterals() { assertEquals(l(Duration.ZERO, TIME_DURATION), whereExpression("0 second")); assertEquals(l(Duration.ofSeconds(value), TIME_DURATION), whereExpression(value + "second")); assertEquals(l(Duration.ofSeconds(value), TIME_DURATION), whereExpression(value + " seconds")); + assertEquals(l(Duration.ofSeconds(value), TIME_DURATION), whereExpression(value + " sec")); + assertEquals(l(Duration.ofSeconds(value), TIME_DURATION), whereExpression(value + " s")); assertEquals(l(Duration.ZERO, TIME_DURATION), whereExpression("0 minute")); assertEquals(l(Duration.ofMinutes(value), TIME_DURATION), whereExpression(value + "minute")); assertEquals(l(Duration.ofMinutes(value), TIME_DURATION), whereExpression(value + " minutes")); + assertEquals(l(Duration.ofMinutes(value), TIME_DURATION), whereExpression(value + " min")); assertEquals(l(Duration.ZERO, TIME_DURATION), whereExpression("0 hour")); assertEquals(l(Duration.ofHours(value), TIME_DURATION), whereExpression(value + "hour")); assertEquals(l(Duration.ofHours(value), TIME_DURATION), whereExpression(value + " hours")); + assertEquals(l(Duration.ofHours(value), TIME_DURATION), whereExpression(value + " h")); assertEquals(l(Duration.ofHours(-value), TIME_DURATION), whereExpression("-" + value + " hours")); } @@ -395,22 +399,33 @@ public void testDurationLiterals() { public void testDatePeriodLiterals() { int value = randomInt(Integer.MAX_VALUE); int weeksValue = randomInt(Integer.MAX_VALUE / 7); + int quartersValue = randomInt(Integer.MAX_VALUE / 3); assertEquals(l(Period.ZERO, DATE_PERIOD), whereExpression("0 day")); assertEquals(l(Period.ofDays(value), DATE_PERIOD), whereExpression(value + "day")); assertEquals(l(Period.ofDays(value), DATE_PERIOD), whereExpression(value + " days")); + assertEquals(l(Period.ofDays(value), DATE_PERIOD), whereExpression(value + " d")); assertEquals(l(Period.ZERO, DATE_PERIOD), whereExpression("0week")); assertEquals(l(Period.ofDays(weeksValue * 7), DATE_PERIOD), 
whereExpression(weeksValue + "week")); assertEquals(l(Period.ofDays(weeksValue * 7), DATE_PERIOD), whereExpression(weeksValue + " weeks")); + assertEquals(l(Period.ofDays(weeksValue * 7), DATE_PERIOD), whereExpression(weeksValue + " w")); assertEquals(l(Period.ZERO, DATE_PERIOD), whereExpression("0 month")); assertEquals(l(Period.ofMonths(value), DATE_PERIOD), whereExpression(value + "month")); assertEquals(l(Period.ofMonths(value), DATE_PERIOD), whereExpression(value + " months")); + assertEquals(l(Period.ofMonths(value), DATE_PERIOD), whereExpression(value + " mo")); + + assertEquals(l(Period.ZERO, DATE_PERIOD), whereExpression("0 quarter")); + assertEquals(l(Period.ofMonths(Math.multiplyExact(quartersValue, 3)), DATE_PERIOD), whereExpression(quartersValue + " quarter")); + assertEquals(l(Period.ofMonths(Math.multiplyExact(quartersValue, 3)), DATE_PERIOD), whereExpression(quartersValue + " quarters")); + assertEquals(l(Period.ofMonths(Math.multiplyExact(quartersValue, 3)), DATE_PERIOD), whereExpression(quartersValue + " q")); assertEquals(l(Period.ZERO, DATE_PERIOD), whereExpression("0year")); assertEquals(l(Period.ofYears(value), DATE_PERIOD), whereExpression(value + "year")); assertEquals(l(Period.ofYears(value), DATE_PERIOD), whereExpression(value + " years")); + assertEquals(l(Period.ofYears(value), DATE_PERIOD), whereExpression(value + " yr")); + assertEquals(l(Period.ofYears(value), DATE_PERIOD), whereExpression(value + " y")); assertEquals(l(Period.ofYears(-value), DATE_PERIOD), whereExpression("-" + value + " years")); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index cf0dfa372ea3f..ddd53cad8ec6d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.parser; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Build; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.Randomness; import org.elasticsearch.core.Tuple; @@ -44,6 +45,7 @@ import org.elasticsearch.xpack.ql.expression.function.UnresolvedFunction; import org.elasticsearch.xpack.ql.expression.predicate.logical.Not; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; +import org.elasticsearch.xpack.ql.plan.TableIdentifier; import org.elasticsearch.xpack.ql.plan.logical.Filter; import org.elasticsearch.xpack.ql.plan.logical.Limit; import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; @@ -338,17 +340,17 @@ public void testInlineStatsWithoutGroups() { } public void testIdentifiersAsIndexPattern() { - assertIdentifierAsIndexPattern("foo", "from `foo`"); - assertIdentifierAsIndexPattern("foo,test-*", "from `foo`,`test-*`"); + // assertIdentifierAsIndexPattern("foo", "from `foo`"); + // assertIdentifierAsIndexPattern("foo,test-*", "from `foo`,`test-*`"); assertIdentifierAsIndexPattern("foo,test-*", "from foo,test-*"); assertIdentifierAsIndexPattern("123-test@foo_bar+baz1", "from 123-test@foo_bar+baz1"); - assertIdentifierAsIndexPattern("foo,test-*,abc", "from `foo`,`test-*`,abc"); - assertIdentifierAsIndexPattern("foo, test-*, abc, xyz", "from `foo, test-*, abc, xyz`"); - assertIdentifierAsIndexPattern("foo, test-*, abc, xyz,test123", "from `foo, test-*, abc, xyz`, test123"); + // assertIdentifierAsIndexPattern("foo,test-*,abc", "from `foo`,`test-*`,abc"); + // assertIdentifierAsIndexPattern("foo, test-*, abc, xyz", "from `foo, test-*, abc, xyz`"); + // assertIdentifierAsIndexPattern("foo, test-*, abc, xyz,test123", "from `foo, test-*, abc, xyz`, test123"); assertIdentifierAsIndexPattern("foo,test,xyz", "from foo, 
test,xyz"); assertIdentifierAsIndexPattern( - ",", - "from , ``" + "", // , + "from " // , `` ); } @@ -1049,6 +1051,147 @@ public void testInlineConvertUnsupportedType() { expectError("ROW 3::BYTE", "line 1:6: Unsupported conversion to type [BYTE]"); } + public void testMetricsWithoutStats() { + assumeTrue("requires snapshot build", Build.current().isSnapshot()); + + assertStatement("METRICS foo", new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo"), List.of())); + assertStatement("METRICS foo,bar", new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo,bar"), List.of())); + assertStatement("METRICS foo*,bar", new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo*,bar"), List.of())); + assertStatement("METRICS foo-*,bar", new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo-*,bar"), List.of())); + assertStatement( + "METRICS foo-*,bar+*", + new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo-*,bar+*"), List.of()) + ); + } + + public void testMetricsIdentifiers() { + assumeTrue("requires snapshot build", Build.current().isSnapshot()); + Map patterns = Map.of( + "metrics foo,test-*", + "foo,test-*", + "metrics 123-test@foo_bar+baz1", + "123-test@foo_bar+baz1", + "metrics foo, test,xyz", + "foo,test,xyz", + "metrics >", + ">" + ); + for (Map.Entry e : patterns.entrySet()) { + assertStatement(e.getKey(), new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, e.getValue()), List.of())); + } + } + + public void testSimpleMetricsWithStats() { + assumeTrue("requires snapshot build", Build.current().isSnapshot()); + assertStatement( + "METRICS foo load=avg(cpu) BY ts", + new EsqlAggregate( + EMPTY, + new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo"), List.of()), + List.of(attribute("ts")), + List.of(new Alias(EMPTY, "load", new UnresolvedFunction(EMPTY, "avg", DEFAULT, List.of(attribute("cpu")))), attribute("ts")) + ) + ); + assertStatement( + 
"METRICS foo,bar load=avg(cpu) BY ts", + new EsqlAggregate( + EMPTY, + new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo,bar"), List.of()), + List.of(attribute("ts")), + List.of(new Alias(EMPTY, "load", new UnresolvedFunction(EMPTY, "avg", DEFAULT, List.of(attribute("cpu")))), attribute("ts")) + ) + ); + assertStatement( + "METRICS foo,bar load=avg(cpu),max(rate(requests)) BY ts", + new EsqlAggregate( + EMPTY, + new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo,bar"), List.of()), + List.of(attribute("ts")), + List.of( + new Alias(EMPTY, "load", new UnresolvedFunction(EMPTY, "avg", DEFAULT, List.of(attribute("cpu")))), + new Alias( + EMPTY, + "max(rate(requests))", + new UnresolvedFunction( + EMPTY, + "max", + DEFAULT, + List.of(new UnresolvedFunction(EMPTY, "rate", DEFAULT, List.of(attribute("requests")))) + ) + ), + attribute("ts") + ) + ) + ); + assertStatement( + "METRICS foo* count(errors)", + new EsqlAggregate( + EMPTY, + new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo*"), List.of()), + List.of(), + List.of(new Alias(EMPTY, "count(errors)", new UnresolvedFunction(EMPTY, "count", DEFAULT, List.of(attribute("errors"))))) + ) + ); + assertStatement( + "METRICS foo* a(b)", + new EsqlAggregate( + EMPTY, + new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo*"), List.of()), + List.of(), + List.of(new Alias(EMPTY, "a(b)", new UnresolvedFunction(EMPTY, "a", DEFAULT, List.of(attribute("b"))))) + ) + ); + assertStatement( + "METRICS foo* a(b)", + new EsqlAggregate( + EMPTY, + new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo*"), List.of()), + List.of(), + List.of(new Alias(EMPTY, "a(b)", new UnresolvedFunction(EMPTY, "a", DEFAULT, List.of(attribute("b"))))) + ) + ); + assertStatement( + "METRICS foo* a1(b2)", + new EsqlAggregate( + EMPTY, + new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo*"), List.of()), + List.of(), + List.of(new 
Alias(EMPTY, "a1(b2)", new UnresolvedFunction(EMPTY, "a1", DEFAULT, List.of(attribute("b2"))))) + ) + ); + assertStatement( + "METRICS foo*,bar* b = min(a) by c, d.e", + new EsqlAggregate( + EMPTY, + new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo*,bar*"), List.of()), + List.of(attribute("c"), attribute("d.e")), + List.of( + new Alias(EMPTY, "b", new UnresolvedFunction(EMPTY, "min", DEFAULT, List.of(attribute("a")))), + attribute("c"), + attribute("d.e") + ) + ) + ); + } + + public void testMetricWithGroupKeyAsAgg() { + assumeTrue("requires snapshot build", Build.current().isSnapshot()); + var queries = List.of("METRICS foo a BY a"); + for (String query : queries) { + expectVerificationError(query, "grouping key [a] already specified in the STATS BY clause"); + } + } + + private void assertStatement(String statement, LogicalPlan expected) { + final LogicalPlan actual; + try { + actual = statement(statement); + } catch (Exception e) { + throw new AssertionError("parsing error for [" + statement + "]", e); + } + assertThat(statement, actual, equalTo(expected)); + } + private LogicalPlan statement(String e) { return statement(e, List.of()); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplierTests.java new file mode 100644 index 0000000000000..4206adf1492fd --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplierTests.java @@ -0,0 +1,87 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.plan.logical.local; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.breaker.NoopCircuitBreaker; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.test.AbstractWireTestCase; +import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; + +import java.io.IOException; +import java.util.Arrays; + +public class LocalSupplierTests extends AbstractWireTestCase { + private static final BlockFactory BLOCK_FACTORY = BlockFactory.getInstance( + new NoopCircuitBreaker("noop-esql-breaker"), + BigArrays.NON_RECYCLING_INSTANCE + ); + + @Override + protected LocalSupplier copyInstance(LocalSupplier instance, TransportVersion version) throws IOException { + try (BytesStreamOutput output = new BytesStreamOutput()) { + output.setTransportVersion(version); + instance.writeTo(new PlanStreamOutput(output, PlanNameRegistry.INSTANCE, null)); + try (StreamInput in = output.bytes().streamInput()) { + in.setTransportVersion(version); + return LocalSupplier.readFrom(new PlanStreamInput(in, PlanNameRegistry.INSTANCE, getNamedWriteableRegistry(), null)); + } + } + } + + @Override + protected LocalSupplier createTestInstance() { + return randomBoolean() ? 
LocalSupplier.EMPTY : randomNonEmpty(); + } + + private LocalSupplier randomNonEmpty() { + return LocalSupplier.of(randomList(1, 10, LocalSupplierTests::randomBlock).toArray(Block[]::new)); + } + + @Override + protected LocalSupplier mutateInstance(LocalSupplier instance) throws IOException { + Block[] blocks = instance.get(); + if (blocks.length > 0 && randomBoolean()) { + if (randomBoolean()) { + return LocalSupplier.EMPTY; + } + return LocalSupplier.of(Arrays.copyOf(blocks, blocks.length - 1, Block[].class)); + } + blocks = Arrays.copyOf(blocks, blocks.length + 1, Block[].class); + blocks[blocks.length - 1] = randomBlock(); + return LocalSupplier.of(blocks); + } + + private static Block randomBlock() { + int len = between(1, 1000); + try (IntBlock.Builder ints = BLOCK_FACTORY.newIntBlockBuilder(len)) { + for (int i = 0; i < len; i++) { + ints.appendInt(randomInt()); + } + return ints.build(); + } + } + + @Override + protected boolean shouldBeSame(LocalSupplier newInstance) { + return newInstance.get().length == 0; + } + + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + return new NamedWriteableRegistry(Block.getNamedWriteables()); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestTests.java index 45d57b2fa411e..c9c5091db2894 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestTests.java @@ -86,7 +86,7 @@ protected DataNodeRequest createTestInstance() { ); DataNodeRequest request = new DataNodeRequest( sessionId, - EsqlConfigurationSerializationTests.randomConfiguration(query), + EsqlConfigurationSerializationTests.randomConfiguration(query, EsqlConfigurationSerializationTests.randomTables()), randomAlphaOfLength(10), shardIds, aliasFilters, 
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlConfigurationSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlConfigurationSerializationTests.java index 3e91321651928..41c39e88b943e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlConfigurationSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlConfigurationSerializationTests.java @@ -53,10 +53,10 @@ private static QueryPragmas randomQueryPragmas() { public static EsqlConfiguration randomConfiguration() { int len = randomIntBetween(1, 300) + (frequently() ? 0 : QUERY_COMPRESS_THRESHOLD_CHARS); - return randomConfiguration(randomRealisticUnicodeOfLength(len)); + return randomConfiguration(randomRealisticUnicodeOfLength(len), randomTables()); } - public static EsqlConfiguration randomConfiguration(String query) { + public static EsqlConfiguration randomConfiguration(String query, Map> tables) { var zoneId = randomZone(); var locale = randomLocale(random()); var username = randomAlphaOfLengthBetween(1, 10); @@ -75,11 +75,11 @@ public static EsqlConfiguration randomConfiguration(String query) { defaultTruncation, query, profile, - randomTables() + tables ); } - static Map> randomTables() { + public static Map> randomTables() { if (randomBoolean()) { return Map.of(); } diff --git a/x-pack/plugin/inference/build.gradle b/x-pack/plugin/inference/build.gradle index 48b6156a43039..5429e46a1d3fe 100644 --- a/x-pack/plugin/inference/build.gradle +++ b/x-pack/plugin/inference/build.gradle @@ -4,6 +4,8 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ +import org.elasticsearch.gradle.internal.info.BuildParams + apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' apply plugin: 'elasticsearch.internal-yaml-rest-test' @@ -36,6 +38,12 @@ dependencies { api "com.ibm.icu:icu4j:${versions.icu4j}" } +if (BuildParams.isSnapshotBuild() == false) { + tasks.named("test").configure { + systemProperty 'es.semantic_text_feature_flag_enabled', 'true' + } +} + tasks.named('yamlRestTest') { usesDefaultDistribution() } diff --git a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/AzureOpenAiServiceUpgradeIT.java b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/AzureOpenAiServiceUpgradeIT.java index db5e62a367ab3..d475fd099d4ac 100644 --- a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/AzureOpenAiServiceUpgradeIT.java +++ b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/AzureOpenAiServiceUpgradeIT.java @@ -59,16 +59,16 @@ public void testOpenAiEmbeddings() throws IOException { openAiEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(OpenAiServiceUpgradeIT.embeddingResponse())); put(oldClusterId, embeddingConfig(getUrl(openAiEmbeddingsServer)), TaskType.TEXT_EMBEDDING); - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("endpoints"); assertThat(configs, hasSize(1)); } else if (isMixedCluster()) { - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("endpoints"); assertEquals("azureopenai", configs.get(0).get("service")); assertEmbeddingInference(oldClusterId); } else if (isUpgradedCluster()) { // check old cluster model - var configs = (List>) 
get(TaskType.TEXT_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("endpoints"); var serviceSettings = (Map) configs.get(0).get("service_settings"); // Inference on old cluster model @@ -77,7 +77,7 @@ public void testOpenAiEmbeddings() throws IOException { openAiEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(OpenAiServiceUpgradeIT.embeddingResponse())); put(upgradedClusterId, embeddingConfig(getUrl(openAiEmbeddingsServer)), TaskType.TEXT_EMBEDDING); - configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterId).get("models"); + configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterId).get("endpoints"); assertThat(configs, hasSize(1)); // Inference on the new config diff --git a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/CohereServiceUpgradeIT.java b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/CohereServiceUpgradeIT.java index c73827dba2cbb..c889d8f9b312a 100644 --- a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/CohereServiceUpgradeIT.java +++ b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/CohereServiceUpgradeIT.java @@ -71,7 +71,7 @@ public void testCohereEmbeddings() throws IOException { cohereEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponseFloat())); put(oldClusterIdFloat, embeddingConfigFloat(getUrl(cohereEmbeddingsServer)), TaskType.TEXT_EMBEDDING); - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterIdInt8).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterIdInt8).get("endpoints"); assertThat(configs, hasSize(1)); assertEquals("cohere", configs.get(0).get("service")); var serviceSettings = (Map) configs.get(0).get("service_settings"); @@ -83,7 +83,7 @@ public 
void testCohereEmbeddings() throws IOException { assertEmbeddingInference(oldClusterIdInt8, CohereEmbeddingType.BYTE); assertEmbeddingInference(oldClusterIdFloat, CohereEmbeddingType.FLOAT); } else if (isMixedCluster()) { - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterIdInt8).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterIdInt8).get("endpoints"); assertEquals("cohere", configs.get(0).get("service")); var serviceSettings = (Map) configs.get(0).get("service_settings"); assertThat(serviceSettings, hasEntry("model_id", "embed-english-light-v3.0")); @@ -91,7 +91,7 @@ public void testCohereEmbeddings() throws IOException { // An upgraded node will report the embedding type as byte, an old node int8 assertThat(embeddingType, Matchers.is(oneOf("int8", "byte"))); - configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterIdFloat).get("models"); + configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterIdFloat).get("endpoints"); serviceSettings = (Map) configs.get(0).get("service_settings"); assertThat(serviceSettings, hasEntry("embedding_type", "float")); @@ -99,7 +99,7 @@ public void testCohereEmbeddings() throws IOException { assertEmbeddingInference(oldClusterIdFloat, CohereEmbeddingType.FLOAT); } else if (isUpgradedCluster()) { // check old cluster model - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterIdInt8).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterIdInt8).get("endpoints"); var serviceSettings = (Map) configs.get(0).get("service_settings"); assertThat(serviceSettings, hasEntry("model_id", "embed-english-light-v3.0")); assertThat(serviceSettings, hasEntry("embedding_type", "byte")); @@ -116,7 +116,7 @@ public void testCohereEmbeddings() throws IOException { cohereEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponseByte())); put(upgradedClusterIdByte, embeddingConfigByte(getUrl(cohereEmbeddingsServer)), TaskType.TEXT_EMBEDDING); - 
configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterIdByte).get("models"); + configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterIdByte).get("endpoints"); serviceSettings = (Map) configs.get(0).get("service_settings"); assertThat(serviceSettings, hasEntry("embedding_type", "byte")); @@ -129,7 +129,7 @@ public void testCohereEmbeddings() throws IOException { cohereEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponseByte())); put(upgradedClusterIdInt8, embeddingConfigInt8(getUrl(cohereEmbeddingsServer)), TaskType.TEXT_EMBEDDING); - configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterIdInt8).get("models"); + configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterIdInt8).get("endpoints"); serviceSettings = (Map) configs.get(0).get("service_settings"); assertThat(serviceSettings, hasEntry("embedding_type", "byte")); // int8 rewritten to byte @@ -141,7 +141,7 @@ public void testCohereEmbeddings() throws IOException { cohereEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponseFloat())); put(upgradedClusterIdFloat, embeddingConfigFloat(getUrl(cohereEmbeddingsServer)), TaskType.TEXT_EMBEDDING); - configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterIdFloat).get("models"); + configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterIdFloat).get("endpoints"); serviceSettings = (Map) configs.get(0).get("service_settings"); assertThat(serviceSettings, hasEntry("embedding_type", "float")); @@ -179,12 +179,12 @@ public void testRerank() throws IOException { if (isOldCluster()) { put(oldClusterId, rerankConfig(getUrl(cohereRerankServer)), TaskType.RERANK); - var configs = (List>) get(TaskType.RERANK, oldClusterId).get("models"); + var configs = (List>) get(TaskType.RERANK, oldClusterId).get("endpoints"); assertThat(configs, hasSize(1)); assertRerank(oldClusterId); } else if (isMixedCluster()) { - var configs = (List>) get(TaskType.RERANK, 
oldClusterId).get("models"); + var configs = (List>) get(TaskType.RERANK, oldClusterId).get("endpoints"); assertEquals("cohere", configs.get(0).get("service")); var serviceSettings = (Map) configs.get(0).get("service_settings"); assertThat(serviceSettings, hasEntry("model_id", "rerank-english-v3.0")); @@ -195,7 +195,7 @@ public void testRerank() throws IOException { } else if (isUpgradedCluster()) { // check old cluster model - var configs = (List>) get(TaskType.RERANK, oldClusterId).get("models"); + var configs = (List>) get(TaskType.RERANK, oldClusterId).get("endpoints"); assertEquals("cohere", configs.get(0).get("service")); var serviceSettings = (Map) configs.get(0).get("service_settings"); assertThat(serviceSettings, hasEntry("model_id", "rerank-english-v3.0")); @@ -206,7 +206,7 @@ public void testRerank() throws IOException { // New endpoint put(upgradedClusterId, rerankConfig(getUrl(cohereRerankServer)), TaskType.RERANK); - configs = (List>) get(upgradedClusterId).get("models"); + configs = (List>) get(upgradedClusterId).get("endpoints"); assertThat(configs, hasSize(1)); assertRerank(upgradedClusterId); diff --git a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/HuggingFaceServiceUpgradeIT.java b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/HuggingFaceServiceUpgradeIT.java index 718678f97f37f..899a02776195d 100644 --- a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/HuggingFaceServiceUpgradeIT.java +++ b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/HuggingFaceServiceUpgradeIT.java @@ -63,18 +63,18 @@ public void testHFEmbeddings() throws IOException { embeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponse())); put(oldClusterId, embeddingConfig(getUrl(embeddingsServer)), TaskType.TEXT_EMBEDDING); - var 
configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("endpoints"); assertThat(configs, hasSize(1)); assertEmbeddingInference(oldClusterId); } else if (isMixedCluster()) { - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("endpoints"); assertEquals("hugging_face", configs.get(0).get("service")); assertEmbeddingInference(oldClusterId); } else if (isUpgradedCluster()) { // check old cluster model - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("endpoints"); assertEquals("hugging_face", configs.get(0).get("service")); // Inference on old cluster model @@ -83,7 +83,7 @@ public void testHFEmbeddings() throws IOException { embeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponse())); put(upgradedClusterId, embeddingConfig(getUrl(embeddingsServer)), TaskType.TEXT_EMBEDDING); - configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterId).get("models"); + configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterId).get("endpoints"); assertThat(configs, hasSize(1)); assertEmbeddingInference(upgradedClusterId); @@ -110,17 +110,17 @@ public void testElser() throws IOException { if (isOldCluster()) { put(oldClusterId, elserConfig(getUrl(elserServer)), TaskType.SPARSE_EMBEDDING); - var configs = (List>) get(TaskType.SPARSE_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.SPARSE_EMBEDDING, oldClusterId).get("endpoints"); assertThat(configs, hasSize(1)); assertElser(oldClusterId); } else if (isMixedCluster()) { - var configs = (List>) get(TaskType.SPARSE_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.SPARSE_EMBEDDING, oldClusterId).get("endpoints"); assertEquals("hugging_face", 
configs.get(0).get("service")); assertElser(oldClusterId); } else if (isUpgradedCluster()) { // check old cluster model - var configs = (List>) get(TaskType.SPARSE_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.SPARSE_EMBEDDING, oldClusterId).get("endpoints"); assertEquals("hugging_face", configs.get(0).get("service")); var taskSettings = (Map) configs.get(0).get("task_settings"); assertThat(taskSettings.keySet(), empty()); @@ -129,7 +129,7 @@ public void testElser() throws IOException { // New endpoint put(upgradedClusterId, elserConfig(getUrl(elserServer)), TaskType.SPARSE_EMBEDDING); - configs = (List>) get(upgradedClusterId).get("models"); + configs = (List>) get(upgradedClusterId).get("endpoints"); assertThat(configs, hasSize(1)); assertElser(upgradedClusterId); diff --git a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/InferenceUpgradeTestCase.java b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/InferenceUpgradeTestCase.java index fe08db9b94b89..ecfec2304c8a1 100644 --- a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/InferenceUpgradeTestCase.java +++ b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/InferenceUpgradeTestCase.java @@ -13,7 +13,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.http.MockWebServer; -import org.elasticsearch.upgrades.ParameterizedRollingUpgradeTestCase; +import org.elasticsearch.upgrades.AbstractRollingUpgradeTestCase; import java.io.IOException; import java.util.List; @@ -21,7 +21,7 @@ import static org.elasticsearch.core.Strings.format; -public class InferenceUpgradeTestCase extends ParameterizedRollingUpgradeTestCase { +public class InferenceUpgradeTestCase extends AbstractRollingUpgradeTestCase { public 
InferenceUpgradeTestCase(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/OpenAiServiceUpgradeIT.java b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/OpenAiServiceUpgradeIT.java index 4e8e1c845b070..bfdcb0e0d5ed4 100644 --- a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/OpenAiServiceUpgradeIT.java +++ b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/OpenAiServiceUpgradeIT.java @@ -65,12 +65,12 @@ public void testOpenAiEmbeddings() throws IOException { openAiEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponse())); put(oldClusterId, inferenceConfig, TaskType.TEXT_EMBEDDING); - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("endpoints"); assertThat(configs, hasSize(1)); assertEmbeddingInference(oldClusterId); } else if (isMixedCluster()) { - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("endpoints"); assertEquals("openai", configs.get(0).get("service")); var serviceSettings = (Map) configs.get(0).get("service_settings"); var taskSettings = (Map) configs.get(0).get("task_settings"); @@ -80,7 +80,7 @@ public void testOpenAiEmbeddings() throws IOException { assertEmbeddingInference(oldClusterId); } else if (isUpgradedCluster()) { // check old cluster model - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("endpoints"); var serviceSettings = (Map) configs.get(0).get("service_settings"); // model id is moved to service settings 
assertThat(serviceSettings, hasEntry("model_id", "text-embedding-ada-002")); @@ -94,7 +94,7 @@ public void testOpenAiEmbeddings() throws IOException { openAiEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponse())); put(upgradedClusterId, inferenceConfig, TaskType.TEXT_EMBEDDING); - configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterId).get("models"); + configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterId).get("endpoints"); assertThat(configs, hasSize(1)); assertEmbeddingInference(upgradedClusterId); @@ -122,12 +122,12 @@ public void testOpenAiCompletions() throws IOException { if (isOldCluster()) { put(oldClusterId, chatCompletionsConfig(getUrl(openAiChatCompletionsServer)), TaskType.COMPLETION); - var configs = (List>) get(TaskType.COMPLETION, oldClusterId).get("models"); + var configs = (List>) get(TaskType.COMPLETION, oldClusterId).get("endpoints"); assertThat(configs, hasSize(1)); assertCompletionInference(oldClusterId); } else if (isMixedCluster()) { - var configs = (List>) get(TaskType.COMPLETION, oldClusterId).get("models"); + var configs = (List>) get(TaskType.COMPLETION, oldClusterId).get("endpoints"); assertEquals("openai", configs.get(0).get("service")); var serviceSettings = (Map) configs.get(0).get("service_settings"); assertThat(serviceSettings, hasEntry("model_id", "gpt-4")); @@ -137,7 +137,7 @@ public void testOpenAiCompletions() throws IOException { assertCompletionInference(oldClusterId); } else if (isUpgradedCluster()) { // check old cluster model - var configs = (List>) get(TaskType.COMPLETION, oldClusterId).get("models"); + var configs = (List>) get(TaskType.COMPLETION, oldClusterId).get("endpoints"); var serviceSettings = (Map) configs.get(0).get("service_settings"); assertThat(serviceSettings, hasEntry("model_id", "gpt-4")); var taskSettings = (Map) configs.get(0).get("task_settings"); @@ -146,7 +146,7 @@ public void testOpenAiCompletions() throws IOException { 
assertCompletionInference(oldClusterId); put(upgradedClusterId, chatCompletionsConfig(getUrl(openAiChatCompletionsServer)), TaskType.COMPLETION); - configs = (List>) get(TaskType.COMPLETION, upgradedClusterId).get("models"); + configs = (List>) get(TaskType.COMPLETION, upgradedClusterId).get("endpoints"); assertThat(configs, hasSize(1)); // Inference on the new config diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java index 85a834592f2af..a54b14d8fad18 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java @@ -135,7 +135,7 @@ private List makeChunkedResults(List inp var results = new ArrayList(); for (int i = 0; i < input.size(); i++) { double[] values = new double[dimensions]; - for (int j = 0; j < 5; j++) { + for (int j = 0; j < dimensions; j++) { values[j] = stringWeight(input.get(i), j); } results.add( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java index 8d01b25aa2795..41bef3521cdf2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java @@ -26,6 +26,8 @@ import org.elasticsearch.xpack.core.inference.results.TextEmbeddingByteResults; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; import 
org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionServiceSettings; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionTaskSettings; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsServiceSettings; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsTaskSettings; import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettings; @@ -237,6 +239,21 @@ public static List getNamedWriteables() { ) ); + namedWriteables.add( + new NamedWriteableRegistry.Entry( + ServiceSettings.class, + AzureOpenAiCompletionServiceSettings.NAME, + AzureOpenAiCompletionServiceSettings::new + ) + ); + namedWriteables.add( + new NamedWriteableRegistry.Entry( + TaskSettings.class, + AzureOpenAiCompletionTaskSettings.NAME, + AzureOpenAiCompletionTaskSettings::new + ) + ); + return namedWriteables; } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java new file mode 100644 index 0000000000000..38d8b8d9b35c0 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java @@ -0,0 +1,536 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.action.filter; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.bulk.BulkItemRequest; +import org.elasticsearch.action.bulk.BulkShardRequest; +import org.elasticsearch.action.bulk.TransportShardBulkAction; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.ActionFilterChain; +import org.elasticsearch.action.support.MappedActionFilter; +import org.elasticsearch.action.support.RefCountingRunnable; +import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.cluster.metadata.InferenceFieldMetadata; +import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.ChunkedInferenceServiceResults; +import org.elasticsearch.inference.ChunkingOptions; +import org.elasticsearch.inference.InferenceService; +import org.elasticsearch.inference.InferenceServiceRegistry; +import org.elasticsearch.inference.InputType; +import org.elasticsearch.inference.Model; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; +import org.elasticsearch.xpack.inference.mapper.SemanticTextField; +import org.elasticsearch.xpack.inference.mapper.SemanticTextFieldMapper; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; + +import java.util.ArrayList; +import 
java.util.Collection; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.toSemanticTextFieldChunks; + +/** + * A {@link MappedActionFilter} that intercepts {@link BulkShardRequest} to apply inference on fields specified + * as {@link SemanticTextFieldMapper} in the index mapping. For each semantic text field referencing fields in + * the request source, we generate embeddings and include the results in the source under the semantic text field + * name as a {@link SemanticTextField}. + * This transformation happens on the bulk coordinator node, and the {@link SemanticTextFieldMapper} parses the + * results during indexing on the shard. + * + * TODO: batchSize should be configurable via a cluster setting + */ +public class ShardBulkInferenceActionFilter implements MappedActionFilter { + protected static final int DEFAULT_BATCH_SIZE = 512; + + private final InferenceServiceRegistry inferenceServiceRegistry; + private final ModelRegistry modelRegistry; + private final int batchSize; + + public ShardBulkInferenceActionFilter(InferenceServiceRegistry inferenceServiceRegistry, ModelRegistry modelRegistry) { + this(inferenceServiceRegistry, modelRegistry, DEFAULT_BATCH_SIZE); + } + + public ShardBulkInferenceActionFilter(InferenceServiceRegistry inferenceServiceRegistry, ModelRegistry modelRegistry, int batchSize) { + this.inferenceServiceRegistry = inferenceServiceRegistry; + this.modelRegistry = modelRegistry; + this.batchSize = batchSize; + } + + @Override + public int order() { + // must execute last (after the security action filter) + return Integer.MAX_VALUE; + } + + @Override + public String actionName() { + return TransportShardBulkAction.ACTION_NAME; + } + + @Override + public void apply( + Task task, + String action, + Request request, 
+ ActionListener listener, + ActionFilterChain chain + ) { + if (TransportShardBulkAction.ACTION_NAME.equals(action)) { + BulkShardRequest bulkShardRequest = (BulkShardRequest) request; + var fieldInferenceMetadata = bulkShardRequest.consumeInferenceFieldMap(); + if (fieldInferenceMetadata != null && fieldInferenceMetadata.isEmpty() == false) { + Runnable onInferenceCompletion = () -> chain.proceed(task, action, request, listener); + processBulkShardRequest(fieldInferenceMetadata, bulkShardRequest, onInferenceCompletion); + return; + } + } + chain.proceed(task, action, request, listener); + } + + private void processBulkShardRequest( + Map fieldInferenceMap, + BulkShardRequest bulkShardRequest, + Runnable onCompletion + ) { + new AsyncBulkShardInferenceAction(fieldInferenceMap, bulkShardRequest, onCompletion).run(); + } + + private record InferenceProvider(InferenceService service, Model model) {} + + /** + * A field inference request on a single input. + * @param index The index of the request in the original bulk request. + * @param field The target field. + * @param input The input to run inference on. + * @param inputOrder The original order of the input. + * @param isOriginalFieldInput Whether the input is part of the original values of the field. + */ + private record FieldInferenceRequest(int index, String field, String input, int inputOrder, boolean isOriginalFieldInput) {} + + /** + * The field inference response. + * @param field The target field. + * @param input The input that was used to run inference. + * @param inputOrder The original order of the input. + * @param isOriginalFieldInput Whether the input is part of the original values of the field. + * @param model The model used to run inference. + * @param chunkedResults The actual results. 
+ */ + private record FieldInferenceResponse( + String field, + String input, + int inputOrder, + boolean isOriginalFieldInput, + Model model, + ChunkedInferenceServiceResults chunkedResults + ) {} + + private record FieldInferenceResponseAccumulator( + int id, + Map> responses, + List failures + ) { + void addOrUpdateResponse(FieldInferenceResponse response) { + synchronized (this) { + var list = responses.computeIfAbsent(response.field, k -> new ArrayList<>()); + list.add(response); + } + } + + void addFailure(Exception exc) { + synchronized (this) { + failures.add(exc); + } + } + } + + private class AsyncBulkShardInferenceAction implements Runnable { + private final Map fieldInferenceMap; + private final BulkShardRequest bulkShardRequest; + private final Runnable onCompletion; + private final AtomicArray inferenceResults; + + private AsyncBulkShardInferenceAction( + Map fieldInferenceMap, + BulkShardRequest bulkShardRequest, + Runnable onCompletion + ) { + this.fieldInferenceMap = fieldInferenceMap; + this.bulkShardRequest = bulkShardRequest; + this.inferenceResults = new AtomicArray<>(bulkShardRequest.items().length); + this.onCompletion = onCompletion; + } + + @Override + public void run() { + Map> inferenceRequests = createFieldInferenceRequests(bulkShardRequest); + Runnable onInferenceCompletion = () -> { + try { + for (var inferenceResponse : inferenceResults.asList()) { + var request = bulkShardRequest.items()[inferenceResponse.id]; + try { + applyInferenceResponses(request, inferenceResponse); + } catch (Exception exc) { + request.abort(bulkShardRequest.index(), exc); + } + } + } finally { + onCompletion.run(); + } + }; + try (var releaseOnFinish = new RefCountingRunnable(onInferenceCompletion)) { + for (var entry : inferenceRequests.entrySet()) { + executeShardBulkInferenceAsync(entry.getKey(), null, entry.getValue(), releaseOnFinish.acquire()); + } + } + } + + private void executeShardBulkInferenceAsync( + final String inferenceId, + @Nullable 
InferenceProvider inferenceProvider, + final List requests, + final Releasable onFinish + ) { + if (inferenceProvider == null) { + ActionListener modelLoadingListener = new ActionListener<>() { + @Override + public void onResponse(ModelRegistry.UnparsedModel unparsedModel) { + var service = inferenceServiceRegistry.getService(unparsedModel.service()); + if (service.isEmpty() == false) { + var provider = new InferenceProvider( + service.get(), + service.get() + .parsePersistedConfigWithSecrets( + inferenceId, + unparsedModel.taskType(), + unparsedModel.settings(), + unparsedModel.secrets() + ) + ); + executeShardBulkInferenceAsync(inferenceId, provider, requests, onFinish); + } else { + try (onFinish) { + for (FieldInferenceRequest request : requests) { + inferenceResults.get(request.index).failures.add( + new ResourceNotFoundException( + "Inference service [{}] not found for field [{}]", + unparsedModel.service(), + request.field + ) + ); + } + } + } + } + + @Override + public void onFailure(Exception exc) { + try (onFinish) { + for (FieldInferenceRequest request : requests) { + Exception failure; + if (ExceptionsHelper.unwrap(exc, ResourceNotFoundException.class) instanceof ResourceNotFoundException) { + failure = new ResourceNotFoundException( + "Inference id [{}] not found for field [{}]", + inferenceId, + request.field + ); + } else { + failure = new ElasticsearchException( + "Error loading inference for inference id [{}] on field [{}]", + exc, + inferenceId, + request.field + ); + } + inferenceResults.get(request.index).failures.add(failure); + } + } + } + }; + modelRegistry.getModelWithSecrets(inferenceId, modelLoadingListener); + return; + } + int currentBatchSize = Math.min(requests.size(), batchSize); + final List currentBatch = requests.subList(0, currentBatchSize); + final List nextBatch = requests.subList(currentBatchSize, requests.size()); + final List inputs = currentBatch.stream().map(FieldInferenceRequest::input).collect(Collectors.toList()); + 
ActionListener> completionListener = new ActionListener<>() { + @Override + public void onResponse(List results) { + try { + var requestsIterator = requests.iterator(); + for (ChunkedInferenceServiceResults result : results) { + var request = requestsIterator.next(); + var acc = inferenceResults.get(request.index); + if (result instanceof ErrorChunkedInferenceResults error) { + acc.addFailure( + new ElasticsearchException( + "Exception when running inference id [{}] on field [{}]", + error.getException(), + inferenceProvider.model.getInferenceEntityId(), + request.field + ) + ); + } else { + acc.addOrUpdateResponse( + new FieldInferenceResponse( + request.field(), + request.input(), + request.inputOrder(), + request.isOriginalFieldInput(), + inferenceProvider.model, + result + ) + ); + } + } + } finally { + onFinish(); + } + } + + @Override + public void onFailure(Exception exc) { + try { + for (FieldInferenceRequest request : requests) { + addInferenceResponseFailure( + request.index, + new ElasticsearchException( + "Exception when running inference id [{}] on field [{}]", + exc, + inferenceProvider.model.getInferenceEntityId(), + request.field + ) + ); + } + } finally { + onFinish(); + } + } + + private void onFinish() { + if (nextBatch.isEmpty()) { + onFinish.close(); + } else { + executeShardBulkInferenceAsync(inferenceId, inferenceProvider, nextBatch, onFinish); + } + } + }; + inferenceProvider.service() + .chunkedInfer( + inferenceProvider.model(), + null, + inputs, + Map.of(), + InputType.INGEST, + new ChunkingOptions(null, null), + TimeValue.MAX_VALUE, + completionListener + ); + } + + private FieldInferenceResponseAccumulator ensureResponseAccumulatorSlot(int id) { + FieldInferenceResponseAccumulator acc = inferenceResults.get(id); + if (acc == null) { + acc = new FieldInferenceResponseAccumulator(id, new HashMap<>(), new ArrayList<>()); + inferenceResults.set(id, acc); + } + return acc; + } + + private void addInferenceResponseFailure(int id, Exception 
failure) { + var acc = ensureResponseAccumulatorSlot(id); + acc.addFailure(failure); + } + + /** + * Applies the {@link FieldInferenceResponseAccumulator} to the provided {@link BulkItemRequest}. + * If the response contains failures, the bulk item request is marked as failed for the downstream action. + * Otherwise, the source of the request is augmented with the field inference results under the + * {@link SemanticTextField#INFERENCE_FIELD} field. + */ + private void applyInferenceResponses(BulkItemRequest item, FieldInferenceResponseAccumulator response) { + if (response.failures().isEmpty() == false) { + for (var failure : response.failures()) { + item.abort(item.index(), failure); + } + return; + } + + final IndexRequest indexRequest = getIndexRequestOrNull(item.request()); + var newDocMap = indexRequest.sourceAsMap(); + for (var entry : response.responses.entrySet()) { + var fieldName = entry.getKey(); + var responses = entry.getValue(); + var model = responses.get(0).model(); + // ensure that the order in the original field is consistent in case of multiple inputs + Collections.sort(responses, Comparator.comparingInt(FieldInferenceResponse::inputOrder)); + List inputs = responses.stream().filter(r -> r.isOriginalFieldInput).map(r -> r.input).collect(Collectors.toList()); + List results = responses.stream().map(r -> r.chunkedResults).collect(Collectors.toList()); + var result = new SemanticTextField( + fieldName, + inputs, + new SemanticTextField.InferenceResult( + model.getInferenceEntityId(), + new SemanticTextField.ModelSettings(model), + toSemanticTextFieldChunks(fieldName, model.getInferenceEntityId(), results, indexRequest.getContentType()) + ), + indexRequest.getContentType() + ); + newDocMap.put(fieldName, result); + } + indexRequest.source(newDocMap, indexRequest.getContentType()); + } + + /** + * Register a {@link FieldInferenceRequest} for every non-empty field referencing an inference ID in the index. 
+ * If results are already populated for fields in the original index request, the inference request for this specific + * field is skipped, and the existing results remain unchanged. + * Validation of inference ID and model settings occurs in the {@link SemanticTextFieldMapper} during field indexing, + * where an error will be thrown if they mismatch or if the content is malformed. + *

+ * TODO: We should validate the settings for pre-existing results here and apply the inference only if they differ? + */ + private Map> createFieldInferenceRequests(BulkShardRequest bulkShardRequest) { + Map> fieldRequestsMap = new LinkedHashMap<>(); + int itemIndex = 0; + for (var item : bulkShardRequest.items()) { + if (item.getPrimaryResponse() != null) { + // item was already aborted/processed by a filter in the chain upstream (e.g. security) + continue; + } + boolean isUpdateRequest = false; + final IndexRequest indexRequest; + if (item.request() instanceof IndexRequest ir) { + indexRequest = ir; + } else if (item.request() instanceof UpdateRequest updateRequest) { + isUpdateRequest = true; + if (updateRequest.script() != null) { + addInferenceResponseFailure( + item.id(), + new ElasticsearchStatusException( + "Cannot apply update with a script on indices that contain [{}] field(s)", + RestStatus.BAD_REQUEST, + SemanticTextFieldMapper.CONTENT_TYPE + ) + ); + continue; + } + indexRequest = updateRequest.doc(); + } else { + // ignore delete request + continue; + } + final Map docMap = indexRequest.sourceAsMap(); + for (var entry : fieldInferenceMap.values()) { + String field = entry.getName(); + String inferenceId = entry.getInferenceId(); + var originalFieldValue = XContentMapValues.extractValue(field, docMap); + if (originalFieldValue instanceof Map) { + continue; + } + int order = 0; + for (var sourceField : entry.getSourceFields()) { + boolean isOriginalFieldInput = sourceField.equals(field); + var valueObj = XContentMapValues.extractValue(sourceField, docMap); + if (valueObj == null) { + if (isUpdateRequest) { + addInferenceResponseFailure( + item.id(), + new ElasticsearchStatusException( + "Field [{}] must be specified on an update request to calculate inference for field [{}]", + RestStatus.BAD_REQUEST, + sourceField, + field + ) + ); + break; + } + continue; + } + ensureResponseAccumulatorSlot(itemIndex); + final List values; + try { + values = 
nodeStringValues(field, valueObj); + } catch (Exception exc) { + addInferenceResponseFailure(item.id(), exc); + break; + } + List fieldRequests = fieldRequestsMap.computeIfAbsent(inferenceId, k -> new ArrayList<>()); + for (var v : values) { + fieldRequests.add(new FieldInferenceRequest(itemIndex, field, v, order++, isOriginalFieldInput)); + } + } + } + itemIndex++; + } + return fieldRequestsMap; + } + } + + /** + * This method converts the given {@code valueObj} into a list of strings. + * If {@code valueObj} is not a string or a collection of strings, it throws an ElasticsearchStatusException. + */ + private static List nodeStringValues(String field, Object valueObj) { + if (valueObj instanceof String value) { + return List.of(value); + } else if (valueObj instanceof Collection values) { + List valuesString = new ArrayList<>(); + for (var v : values) { + if (v instanceof String value) { + valuesString.add(value); + } else { + throw new ElasticsearchStatusException( + "Invalid format for field [{}], expected [String] got [{}]", + RestStatus.BAD_REQUEST, + field, + valueObj.getClass().getSimpleName() + ); + } + } + return valuesString; + } + throw new ElasticsearchStatusException( + "Invalid format for field [{}], expected [String] got [{}]", + RestStatus.BAD_REQUEST, + field, + valueObj.getClass().getSimpleName() + ); + } + + static IndexRequest getIndexRequestOrNull(DocWriteRequest docWriteRequest) { + if (docWriteRequest instanceof IndexRequest indexRequest) { + return indexRequest; + } else if (docWriteRequest instanceof UpdateRequest updateRequest) { + return updateRequest.doc(); + } else { + return null; + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreator.java index 39eaaceae08bc..73ba286c9031a 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreator.java @@ -10,6 +10,7 @@ import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModel; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModel; import java.util.Map; @@ -32,4 +33,10 @@ public ExecutableAction create(AzureOpenAiEmbeddingsModel model, Map taskSettings) { + var overriddenModel = AzureOpenAiCompletionModel.of(model, taskSettings); + return new AzureOpenAiCompletionAction(sender, overriddenModel, serviceComponents); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionVisitor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionVisitor.java index 49d1ce61b12dd..f45c1d797085e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionVisitor.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionVisitor.java @@ -8,10 +8,13 @@ package org.elasticsearch.xpack.inference.external.action.azureopenai; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModel; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModel; import java.util.Map; public interface 
AzureOpenAiActionVisitor { ExecutableAction create(AzureOpenAiEmbeddingsModel model, Map taskSettings); + + ExecutableAction create(AzureOpenAiCompletionModel model, Map taskSettings); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionAction.java new file mode 100644 index 0000000000000..d38d02ef9620f --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionAction.java @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.action.azureopenai; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.http.sender.AzureOpenAiCompletionRequestManager; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; +import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModel; + +import java.util.Objects; + +import static 
org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage; +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.createInternalServerError; +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.wrapFailuresInElasticsearchException; + +public class AzureOpenAiCompletionAction implements ExecutableAction { + + private final String errorMessage; + private final AzureOpenAiCompletionRequestManager requestCreator; + private final Sender sender; + + public AzureOpenAiCompletionAction(Sender sender, AzureOpenAiCompletionModel model, ServiceComponents serviceComponents) { + Objects.requireNonNull(serviceComponents); + Objects.requireNonNull(model); + this.sender = Objects.requireNonNull(sender); + this.requestCreator = new AzureOpenAiCompletionRequestManager(model, serviceComponents.threadPool()); + this.errorMessage = constructFailedToSendRequestMessage(model.getUri(), "Azure OpenAI completion"); + } + + @Override + public void execute(InferenceInputs inferenceInputs, TimeValue timeout, ActionListener listener) { + if (inferenceInputs instanceof DocumentsOnlyInput == false) { + listener.onFailure(new ElasticsearchStatusException("Invalid inference input type", RestStatus.INTERNAL_SERVER_ERROR)); + return; + } + + var docsOnlyInput = (DocumentsOnlyInput) inferenceInputs; + if (docsOnlyInput.getInputs().size() > 1) { + listener.onFailure(new ElasticsearchStatusException("Azure OpenAI completion only accepts 1 input", RestStatus.BAD_REQUEST)); + return; + } + + try { + ActionListener wrappedListener = wrapFailuresInElasticsearchException(errorMessage, listener); + + sender.send(requestCreator, inferenceInputs, timeout, wrappedListener); + } catch (ElasticsearchException e) { + listener.onFailure(e); + } catch (Exception e) { + listener.onFailure(createInternalServerError(e, errorMessage)); + } + } +} diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/azureopenai/AzureOpenAiAccount.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/azureopenai/AzureOpenAiAccount.java deleted file mode 100644 index db1f91cc751ee..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/azureopenai/AzureOpenAiAccount.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference.external.azureopenai; - -import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModel; - -import java.util.Objects; - -public record AzureOpenAiAccount( - String resourceName, - String deploymentId, - String apiVersion, - @Nullable SecureString apiKey, - @Nullable SecureString entraId -) { - - public AzureOpenAiAccount { - Objects.requireNonNull(resourceName); - Objects.requireNonNull(deploymentId); - Objects.requireNonNull(apiVersion); - Objects.requireNonNullElse(apiKey, entraId); - } - - public static AzureOpenAiAccount fromModel(AzureOpenAiEmbeddingsModel model) { - return new AzureOpenAiAccount( - model.getServiceSettings().resourceName(), - model.getServiceSettings().deploymentId(), - model.getServiceSettings().apiVersion(), - model.getSecretSettings().apiKey(), - model.getSecretSettings().entraId() - ); - } -} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiCompletionRequestManager.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiCompletionRequestManager.java new file mode 100644 index 0000000000000..2811155f6f357 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiCompletionRequestManager.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.http.sender; + +import org.apache.http.client.protocol.HttpClientContext; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.external.azureopenai.AzureOpenAiResponseHandler; +import org.elasticsearch.xpack.inference.external.http.retry.RequestSender; +import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiCompletionRequest; +import org.elasticsearch.xpack.inference.external.response.azureopenai.AzureOpenAiCompletionResponseEntity; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModel; + +import java.util.List; +import java.util.Objects; +import java.util.function.Supplier; + +public class AzureOpenAiCompletionRequestManager extends AzureOpenAiRequestManager { + + private static final Logger logger = LogManager.getLogger(AzureOpenAiCompletionRequestManager.class); + + private static final ResponseHandler HANDLER = createCompletionHandler(); + + private final 
AzureOpenAiCompletionModel model; + + private static ResponseHandler createCompletionHandler() { + return new AzureOpenAiResponseHandler("azure openai completion", AzureOpenAiCompletionResponseEntity::fromResponse); + } + + public AzureOpenAiCompletionRequestManager(AzureOpenAiCompletionModel model, ThreadPool threadPool) { + super(threadPool, model); + this.model = Objects.requireNonNull(model); + } + + @Override + public Runnable create( + @Nullable String query, + List input, + RequestSender requestSender, + Supplier hasRequestCompletedFunction, + HttpClientContext context, + ActionListener listener + ) { + AzureOpenAiCompletionRequest request = new AzureOpenAiCompletionRequest(input, model); + return new ExecutableInferenceRequest(requestSender, logger, request, context, HANDLER, hasRequestCompletedFunction, listener); + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequest.java new file mode 100644 index 0000000000000..8854dc7950365 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequest.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.azureopenai; + +import org.apache.http.client.methods.HttpPost; +import org.apache.http.entity.ByteArrayEntity; +import org.elasticsearch.common.Strings; +import org.elasticsearch.xpack.inference.external.request.HttpRequest; +import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModel; + +import java.net.URI; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Objects; + +public class AzureOpenAiCompletionRequest implements AzureOpenAiRequest { + + private final List input; + + private final URI uri; + + private final AzureOpenAiCompletionModel model; + + public AzureOpenAiCompletionRequest(List input, AzureOpenAiCompletionModel model) { + this.input = input; + this.model = Objects.requireNonNull(model); + this.uri = model.getUri(); + } + + @Override + public HttpRequest createHttpRequest() { + var httpPost = new HttpPost(uri); + var requestEntity = Strings.toString(new AzureOpenAiCompletionRequestEntity(input, model.getTaskSettings().user())); + + ByteArrayEntity byteEntity = new ByteArrayEntity(requestEntity.getBytes(StandardCharsets.UTF_8)); + httpPost.setEntity(byteEntity); + + AzureOpenAiRequest.decorateWithAuthHeader(httpPost, model.getSecretSettings()); + + return new HttpRequest(httpPost, getInferenceEntityId()); + } + + @Override + public URI getURI() { + return this.uri; + } + + @Override + public String getInferenceEntityId() { + return model.getInferenceEntityId(); + } + + @Override + public Request truncate() { + // No truncation for Azure OpenAI completion + return this; + } + + @Override + public boolean[] getTruncationInfo() { + // No truncation for Azure OpenAI completion + return null; + } +} diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequestEntity.java new file mode 100644 index 0000000000000..86614ef32855f --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequestEntity.java @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.azureopenai; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +public record AzureOpenAiCompletionRequestEntity(List messages, @Nullable String user) implements ToXContentObject { + + private static final String NUMBER_OF_RETURNED_CHOICES_FIELD = "n"; + + private static final String MESSAGES_FIELD = "messages"; + + private static final String ROLE_FIELD = "role"; + + private static final String CONTENT_FIELD = "content"; + + private static final String USER_FIELD = "user"; + + public AzureOpenAiCompletionRequestEntity { + Objects.requireNonNull(messages); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.startArray(MESSAGES_FIELD); + + { + for (String message : messages) { + builder.startObject(); + + { + builder.field(ROLE_FIELD, USER_FIELD); + builder.field(CONTENT_FIELD, message); + } + + builder.endObject(); + } + } + 
+ builder.endArray(); + + builder.field(NUMBER_OF_RETURNED_CHOICES_FIELD, 1); + + if (Strings.isNullOrEmpty(user) == false) { + builder.field(USER_FIELD, user); + } + + builder.endObject(); + return builder; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequest.java index f60d0130a01b6..00af244fca913 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequest.java @@ -7,15 +7,10 @@ package org.elasticsearch.xpack.inference.external.request.azureopenai; -import org.apache.http.HttpHeaders; import org.apache.http.client.methods.HttpPost; import org.apache.http.entity.ByteArrayEntity; -import org.apache.http.message.BasicHeader; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.ValidationException; -import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.common.Truncator; -import org.elasticsearch.xpack.inference.external.azureopenai.AzureOpenAiAccount; import org.elasticsearch.xpack.inference.external.request.HttpRequest; import org.elasticsearch.xpack.inference.external.request.Request; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModel; @@ -24,24 +19,15 @@ import java.nio.charset.StandardCharsets; import java.util.Objects; -import static org.elasticsearch.xpack.inference.external.request.RequestUtils.createAuthBearerHeader; -import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils.API_KEY_HEADER; -import static 
org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings.API_KEY; -import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings.ENTRA_ID; - public class AzureOpenAiEmbeddingsRequest implements AzureOpenAiRequest { - private static final String MISSING_AUTHENTICATION_ERROR_MESSAGE = - "The request does not have any authentication methods set. One of [%s] or [%s] is required."; private final Truncator truncator; - private final AzureOpenAiAccount account; private final Truncator.TruncationResult truncationResult; private final URI uri; private final AzureOpenAiEmbeddingsModel model; public AzureOpenAiEmbeddingsRequest(Truncator truncator, Truncator.TruncationResult input, AzureOpenAiEmbeddingsModel model) { this.truncator = Objects.requireNonNull(truncator); - this.account = AzureOpenAiAccount.fromModel(model); this.truncationResult = Objects.requireNonNull(input); this.model = Objects.requireNonNull(model); this.uri = model.getUri(); @@ -62,21 +48,7 @@ public HttpRequest createHttpRequest() { ByteArrayEntity byteEntity = new ByteArrayEntity(requestEntity.getBytes(StandardCharsets.UTF_8)); httpPost.setEntity(byteEntity); - httpPost.setHeader(new BasicHeader(HttpHeaders.CONTENT_TYPE, XContentType.JSON.mediaType())); - - var entraId = model.getSecretSettings().entraId(); - var apiKey = model.getSecretSettings().apiKey(); - - if (entraId != null && entraId.isEmpty() == false) { - httpPost.setHeader(createAuthBearerHeader(entraId)); - } else if (apiKey != null && apiKey.isEmpty() == false) { - httpPost.setHeader(new BasicHeader(API_KEY_HEADER, apiKey.toString())); - } else { - // should never happen due to the checks on the secret settings, but just in case - ValidationException validationException = new ValidationException(); - validationException.addValidationError(Strings.format(MISSING_AUTHENTICATION_ERROR_MESSAGE, API_KEY, ENTRA_ID)); - throw validationException; - } + 
AzureOpenAiRequest.decorateWithAuthHeader(httpPost, model.getSecretSettings()); return new HttpRequest(httpPost, getInferenceEntityId()); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequest.java index edb7c70b3903e..79a0e4a4eba33 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequest.java @@ -7,6 +7,40 @@ package org.elasticsearch.xpack.inference.external.request.azureopenai; +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.message.BasicHeader; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; -public interface AzureOpenAiRequest extends Request {} +import static org.elasticsearch.xpack.inference.external.request.RequestUtils.createAuthBearerHeader; +import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils.API_KEY_HEADER; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings.API_KEY; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings.ENTRA_ID; + +public interface AzureOpenAiRequest extends Request { + + String MISSING_AUTHENTICATION_ERROR_MESSAGE = + "The request does not have any authentication methods set. 
One of [%s] or [%s] is required."; + + static void decorateWithAuthHeader(HttpPost httpPost, AzureOpenAiSecretSettings secretSettings) { + httpPost.setHeader(new BasicHeader(HttpHeaders.CONTENT_TYPE, XContentType.JSON.mediaType())); + + var entraId = secretSettings.entraId(); + var apiKey = secretSettings.apiKey(); + + if (entraId != null && entraId.isEmpty() == false) { + httpPost.setHeader(createAuthBearerHeader(entraId)); + } else if (apiKey != null && apiKey.isEmpty() == false) { + httpPost.setHeader(new BasicHeader(API_KEY_HEADER, apiKey.toString())); + } else { + // should never happen due to the checks on the secret settings, but just in case + ValidationException validationException = new ValidationException(); + validationException.addValidationError(Strings.format(MISSING_AUTHENTICATION_ERROR_MESSAGE, API_KEY, ENTRA_ID)); + throw validationException; + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiUtils.java index 16a02a4c06c1c..6e657640e27ec 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiUtils.java @@ -13,6 +13,8 @@ public class AzureOpenAiUtils { public static final String OPENAI_PATH = "openai"; public static final String DEPLOYMENTS_PATH = "deployments"; public static final String EMBEDDINGS_PATH = "embeddings"; + public static final String CHAT_PATH = "chat"; + public static final String COMPLETIONS_PATH = "completions"; public static final String API_VERSION_PARAMETER = "api-version"; public static final String API_KEY_HEADER = "api-key"; diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java index 42fd0ddc812ec..55a7f35710cf6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java @@ -39,7 +39,7 @@ public static void moveToFirstToken(XContentParser parser) throws IOException { public static void positionParserAtTokenAfterField(XContentParser parser, String field, String errorMsgTemplate) throws IOException { XContentParser.Token token = parser.nextToken(); - while (token != null && token != XContentParser.Token.END_OBJECT) { + while (token != null) { if (token == XContentParser.Token.FIELD_NAME && parser.currentName().equals(field)) { parser.nextToken(); return; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntity.java new file mode 100644 index 0000000000000..ca1df7027cb40 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntity.java @@ -0,0 +1,114 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.response.azureopenai; + +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.request.Request; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.moveToFirstToken; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.positionParserAtTokenAfterField; + +public class AzureOpenAiCompletionResponseEntity { + + private static final String FAILED_TO_FIND_FIELD_TEMPLATE = "Failed to find required field [%s] in Azure OpenAI completions response"; + + /** + * Parses the Azure OpenAI completion response. + * For a request like: + * + *

+     *     
+     *         {
+     *             "inputs": "Please summarize this text: some text"
+     *         }
+     *     
+     * 
+ * + * The response would look like: + * + *
+     *     
+     *         {
+     *     "choices": [
+     *         {
+     *             "content_filter_results": {
+     *                 "hate": { ... },
+     *                 "self_harm": { ... },
+     *                 "sexual": { ... },
+     *                 "violence": { ... }
+     *             },
+     *             "finish_reason": "stop",
+     *             "index": 0,
+     *             "logprobs": null,
+     *             "message": {
+     *                 "content": "response",
+     *                 "role": "assistant"
+     *             }
+     *         }
+     *     ],
+     *     "created": 1714982782,
+     *     "id": "...",
+     *     "model": "gpt-4",
+     *     "object": "chat.completion",
+     *     "prompt_filter_results": [
+     *         {
+     *             "prompt_index": 0,
+     *             "content_filter_results": {
+     *                 "hate": { ... },
+     *                 "self_harm": { ... },
+     *                 "sexual": { ... },
+     *                 "violence": { ... }
+     *             }
+     *         }
+     *     ],
+     *     "system_fingerprint": null,
+     *     "usage": { ... }
+     * }
+     *     
+     * 
+ */ + public static ChatCompletionResults fromResponse(Request request, HttpResult response) throws IOException { + var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); + try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { + moveToFirstToken(jsonParser); + + XContentParser.Token token = jsonParser.currentToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); + + positionParserAtTokenAfterField(jsonParser, "choices", FAILED_TO_FIND_FIELD_TEMPLATE); + + jsonParser.nextToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, jsonParser.currentToken(), jsonParser); + + positionParserAtTokenAfterField(jsonParser, "message", FAILED_TO_FIND_FIELD_TEMPLATE); + + token = jsonParser.currentToken(); + + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); + + positionParserAtTokenAfterField(jsonParser, "content", FAILED_TO_FIND_FIELD_TEMPLATE); + + XContentParser.Token contentToken = jsonParser.currentToken(); + ensureExpectedToken(XContentParser.Token.VALUE_STRING, contentToken, jsonParser); + String content = jsonParser.text(); + + return new ChatCompletionResults(List.of(new ChatCompletionResults.Result(content))); + } + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiModel.java index 5e50229e25643..708088af54cc2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiModel.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.inference.services.azureopenai; +import org.apache.http.client.utils.URIBuilder; import 
org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; @@ -14,11 +15,18 @@ import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.action.azureopenai.AzureOpenAiActionVisitor; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils; import java.net.URI; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; import java.util.Map; import java.util.Objects; +import static org.elasticsearch.core.Strings.format; + public abstract class AzureOpenAiModel extends Model { protected URI uri; @@ -50,6 +58,30 @@ protected AzureOpenAiModel(AzureOpenAiModel model, ServiceSettings serviceSettin public abstract ExecutableAction accept(AzureOpenAiActionVisitor creator, Map taskSettings); + public final URI buildUriString() throws URISyntaxException { + return AzureOpenAiModel.buildUri(resourceName(), deploymentId(), apiVersion(), operationPathSegments()); + } + + // use only for testing directly + public static URI buildUri(String resourceName, String deploymentId, String apiVersion, String... 
pathSegments) + throws URISyntaxException { + String hostname = format("%s.%s", resourceName, AzureOpenAiUtils.HOST_SUFFIX); + + return new URIBuilder().setScheme("https") + .setHost(hostname) + .setPathSegments(createPathSegmentsList(deploymentId, pathSegments)) + .addParameter(AzureOpenAiUtils.API_VERSION_PARAMETER, apiVersion) + .build(); + } + + private static List createPathSegmentsList(String deploymentId, String[] pathSegments) { + List pathSegmentsList = new ArrayList<>( + List.of(AzureOpenAiUtils.OPENAI_PATH, AzureOpenAiUtils.DEPLOYMENTS_PATH, deploymentId) + ); + pathSegmentsList.addAll(Arrays.asList(pathSegments)); + return pathSegmentsList; + } + public URI getUri() { return uri; } @@ -62,4 +94,13 @@ public void setUri(URI newUri) { public AzureOpenAiRateLimitServiceSettings rateLimitServiceSettings() { return rateLimitServiceSettings; } + + // TODO: can be inferred directly from modelConfigurations.getServiceSettings(); will be addressed with separate refactoring + public abstract String resourceName(); + + public abstract String deploymentId(); + + public abstract String apiVersion(); + + public abstract String[] operationPathSegments(); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettings.java index f871fe6c080a1..48e45f368bfe2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettings.java @@ -25,12 +25,16 @@ import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalSecureString; -public record AzureOpenAiSecretSettings(@Nullable SecureString apiKey, @Nullable 
SecureString entraId) implements SecretSettings { +public class AzureOpenAiSecretSettings implements SecretSettings { public static final String NAME = "azure_openai_secret_settings"; public static final String API_KEY = "api_key"; public static final String ENTRA_ID = "entra_id"; + private final SecureString entraId; + + private final SecureString apiKey; + public static AzureOpenAiSecretSettings fromMap(@Nullable Map map) { if (map == null) { return null; @@ -59,14 +63,24 @@ public static AzureOpenAiSecretSettings fromMap(@Nullable Map ma return new AzureOpenAiSecretSettings(secureApiToken, secureEntraId); } - public AzureOpenAiSecretSettings { + public AzureOpenAiSecretSettings(@Nullable SecureString apiKey, @Nullable SecureString entraId) { Objects.requireNonNullElse(apiKey, entraId); + this.apiKey = apiKey; + this.entraId = entraId; } public AzureOpenAiSecretSettings(StreamInput in) throws IOException { this(in.readOptionalSecureString(), in.readOptionalSecureString()); } + public SecureString apiKey() { + return apiKey; + } + + public SecureString entraId() { + return entraId; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -98,4 +112,17 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalSecureString(apiKey); out.writeOptionalSecureString(entraId); } + + @Override + public boolean equals(Object object) { + if (this == object) return true; + if (object == null || getClass() != object.getClass()) return false; + AzureOpenAiSecretSettings that = (AzureOpenAiSecretSettings) object; + return Objects.equals(entraId, that.entraId) && Objects.equals(apiKey, that.apiKey); + } + + @Override + public int hashCode() { + return Objects.hash(entraId, apiKey); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java index c6b97e22b099d..e0e48ab20a86b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java @@ -35,6 +35,7 @@ import org.elasticsearch.xpack.inference.services.SenderService; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.ServiceUtils; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModel; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModel; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsServiceSettings; @@ -121,19 +122,23 @@ private static AzureOpenAiModel createModel( String failureMessage, ConfigurationParseContext context ) { - if (taskType == TaskType.TEXT_EMBEDDING) { - return new AzureOpenAiEmbeddingsModel( - inferenceEntityId, - taskType, - NAME, - serviceSettings, - taskSettings, - secretSettings, - context - ); + switch (taskType) { + case TEXT_EMBEDDING -> { + return new AzureOpenAiEmbeddingsModel( + inferenceEntityId, + taskType, + NAME, + serviceSettings, + taskSettings, + secretSettings, + context + ); + } + case COMPLETION -> { + return new AzureOpenAiCompletionModel(inferenceEntityId, taskType, NAME, serviceSettings, taskSettings, secretSettings); + } + default -> throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); } - - throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModel.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModel.java new file mode 100644 index 0000000000000..05cb663453542 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModel.java @@ -0,0 +1,121 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.action.azureopenai.AzureOpenAiActionVisitor; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiModel; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; + +import java.net.URISyntaxException; +import java.util.Map; + +public class AzureOpenAiCompletionModel extends AzureOpenAiModel { + + public static AzureOpenAiCompletionModel of(AzureOpenAiCompletionModel model, Map taskSettings) { + if (taskSettings == null || taskSettings.isEmpty()) { + return model; + } + + var requestTaskSettings = AzureOpenAiCompletionRequestTaskSettings.fromMap(taskSettings); + return new AzureOpenAiCompletionModel(model, AzureOpenAiCompletionTaskSettings.of(model.getTaskSettings(), requestTaskSettings)); + } + + public AzureOpenAiCompletionModel( + String inferenceEntityId, + TaskType taskType, + String service, + Map 
serviceSettings, + Map taskSettings, + @Nullable Map secrets + ) { + this( + inferenceEntityId, + taskType, + service, + AzureOpenAiCompletionServiceSettings.fromMap(serviceSettings), + AzureOpenAiCompletionTaskSettings.fromMap(taskSettings), + AzureOpenAiSecretSettings.fromMap(secrets) + ); + } + + // Should only be used directly for testing + AzureOpenAiCompletionModel( + String inferenceEntityId, + TaskType taskType, + String service, + AzureOpenAiCompletionServiceSettings serviceSettings, + AzureOpenAiCompletionTaskSettings taskSettings, + @Nullable AzureOpenAiSecretSettings secrets + ) { + super( + new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, taskSettings), + new ModelSecrets(secrets), + serviceSettings + ); + try { + this.uri = buildUriString(); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + public AzureOpenAiCompletionModel(AzureOpenAiCompletionModel originalModel, AzureOpenAiCompletionServiceSettings serviceSettings) { + super(originalModel, serviceSettings); + } + + private AzureOpenAiCompletionModel(AzureOpenAiCompletionModel originalModel, AzureOpenAiCompletionTaskSettings taskSettings) { + super(originalModel, taskSettings); + } + + @Override + public AzureOpenAiCompletionServiceSettings getServiceSettings() { + return (AzureOpenAiCompletionServiceSettings) super.getServiceSettings(); + } + + @Override + public AzureOpenAiCompletionTaskSettings getTaskSettings() { + return (AzureOpenAiCompletionTaskSettings) super.getTaskSettings(); + } + + @Override + public AzureOpenAiSecretSettings getSecretSettings() { + return (AzureOpenAiSecretSettings) super.getSecretSettings(); + } + + @Override + public ExecutableAction accept(AzureOpenAiActionVisitor creator, Map taskSettings) { + return creator.create(this, taskSettings); + } + + @Override + public String resourceName() { + return getServiceSettings().resourceName(); + } + + @Override + public String deploymentId() { + return 
getServiceSettings().deploymentId(); + } + + @Override + public String apiVersion() { + return getServiceSettings().apiVersion(); + } + + @Override + public String[] operationPathSegments() { + return new String[] { AzureOpenAiUtils.CHAT_PATH, AzureOpenAiUtils.COMPLETIONS_PATH }; + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettings.java new file mode 100644 index 0000000000000..5dd42bb1b911f --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettings.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; + +import java.util.Map; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields.USER; + +public record AzureOpenAiCompletionRequestTaskSettings(@Nullable String user) { + + public static final AzureOpenAiCompletionRequestTaskSettings EMPTY_SETTINGS = new AzureOpenAiCompletionRequestTaskSettings(null); + + public static AzureOpenAiCompletionRequestTaskSettings fromMap(Map map) { + if (map.isEmpty()) { + return AzureOpenAiCompletionRequestTaskSettings.EMPTY_SETTINGS; + } + + ValidationException validationException = new ValidationException(); + + String user = extractOptionalString(map, USER, ModelConfigurations.TASK_SETTINGS, validationException); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new AzureOpenAiCompletionRequestTaskSettings(user); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettings.java new file mode 100644 index 0000000000000..4100ce7358a3f --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettings.java @@ -0,0 +1,183 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

package org.elasticsearch.xpack.inference.services.azureopenai.completion;

import org.elasticsearch.TransportVersion;
import org.elasticsearch.TransportVersions;
import org.elasticsearch.common.ValidationException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.inference.ModelConfigurations;
import org.elasticsearch.inference.ServiceSettings;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiRateLimitServiceSettings;
import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject;
import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings;

import java.io.IOException;
import java.util.Map;
import java.util.Objects;

import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredString;
import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields.API_VERSION;
import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields.DEPLOYMENT_ID;
import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields.RESOURCE_NAME;

/**
 * Service settings for the Azure OpenAI chat completion endpoint: the resource name,
 * deployment id, and api version that identify the target deployment, plus the
 * request rate limit. All three identifying fields are required; the rate limit is
 * optional and falls back to {@link #DEFAULT_RATE_LIMIT_SETTINGS}.
 */
public class AzureOpenAiCompletionServiceSettings extends FilteredXContentObject
    implements
        ServiceSettings,
        AzureOpenAiRateLimitServiceSettings {

    public static final String NAME = "azure_openai_completions_service_settings";

    /**
     * Rate limit documentation can be found here:
     *
     * Limits per region per model id
     * https://learn.microsoft.com/en-us/azure/ai-services/openai/quotas-limits
     *
     * How to change the limits
     * https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/quota?tabs=rest
     *
     * Blog giving some examples
     * https://techcommunity.microsoft.com/t5/fasttrack-for-azure/optimizing-azure-openai-a-guide-to-limits-quotas-and-best/ba-p/4076268
     *
     * According to the docs 1000 tokens per minute (TPM) = 6 requests per minute (RPM). The limits change depending on the region
     * and model. The lowest chat completions limit is 20k TPM, so we'll default to that.
     * Calculation: 20K TPM = 20 * 6 = 120 requests per minute (used `francecentral` and `gpt-4` as basis for the calculation).
     */
    private static final RateLimitSettings DEFAULT_RATE_LIMIT_SETTINGS = new RateLimitSettings(120);

    /**
     * Parses the service settings from the raw configuration map.
     *
     * @param map the service settings section of the model configuration; consumed keys are removed
     * @return the parsed settings
     * @throws ValidationException if any required field is missing or invalid
     */
    public static AzureOpenAiCompletionServiceSettings fromMap(Map<String, Object> map) {
        ValidationException validationException = new ValidationException();

        var settings = fromMap(map, validationException);

        if (validationException.validationErrors().isEmpty() == false) {
            throw validationException;
        }

        return new AzureOpenAiCompletionServiceSettings(settings);
    }

    // Collects all field-level errors into the shared ValidationException so the caller
    // can report every problem at once instead of failing on the first one.
    private static AzureOpenAiCompletionServiceSettings.CommonFields fromMap(
        Map<String, Object> map,
        ValidationException validationException
    ) {
        String resourceName = extractRequiredString(map, RESOURCE_NAME, ModelConfigurations.SERVICE_SETTINGS, validationException);
        String deploymentId = extractRequiredString(map, DEPLOYMENT_ID, ModelConfigurations.SERVICE_SETTINGS, validationException);
        String apiVersion = extractRequiredString(map, API_VERSION, ModelConfigurations.SERVICE_SETTINGS, validationException);
        RateLimitSettings rateLimitSettings = RateLimitSettings.of(map, DEFAULT_RATE_LIMIT_SETTINGS, validationException);

        return new AzureOpenAiCompletionServiceSettings.CommonFields(resourceName, deploymentId, apiVersion, rateLimitSettings);
    }

    // Internal carrier for the parsed fields; keeps the two fromMap overloads decoupled.
    private record CommonFields(String resourceName, String deploymentId, String apiVersion, RateLimitSettings rateLimitSettings) {}

    private final String resourceName;
    private final String deploymentId;
    private final String apiVersion;

    private final RateLimitSettings rateLimitSettings;

    /**
     * @param resourceName      the Azure OpenAI resource name (required)
     * @param deploymentId      the deployment id within the resource (required)
     * @param apiVersion        the REST api-version query parameter value (required)
     * @param rateLimitSettings optional rate limit; defaults to {@link #DEFAULT_RATE_LIMIT_SETTINGS} when null
     */
    public AzureOpenAiCompletionServiceSettings(
        String resourceName,
        String deploymentId,
        String apiVersion,
        @Nullable RateLimitSettings rateLimitSettings
    ) {
        this.resourceName = resourceName;
        this.deploymentId = deploymentId;
        this.apiVersion = apiVersion;
        this.rateLimitSettings = Objects.requireNonNullElse(rateLimitSettings, DEFAULT_RATE_LIMIT_SETTINGS);
    }

    public AzureOpenAiCompletionServiceSettings(StreamInput in) throws IOException {
        resourceName = in.readString();
        deploymentId = in.readString();
        apiVersion = in.readString();
        rateLimitSettings = new RateLimitSettings(in);
    }

    private AzureOpenAiCompletionServiceSettings(AzureOpenAiCompletionServiceSettings.CommonFields fields) {
        this(fields.resourceName, fields.deploymentId, fields.apiVersion, fields.rateLimitSettings);
    }

    public String resourceName() {
        return resourceName;
    }

    public String deploymentId() {
        return deploymentId;
    }

    @Override
    public RateLimitSettings rateLimitSettings() {
        // BUGFIX: previously returned DEFAULT_RATE_LIMIT_SETTINGS, silently discarding any
        // user-configured rate limit even though the constructor stores it, writeTo serializes
        // it, and equals/hashCode compare it. Return the stored field instead.
        return rateLimitSettings;
    }

    public String apiVersion() {
        return apiVersion;
    }

    @Override
    public String getWriteableName() {
        return NAME;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
        builder.startObject();

        toXContentFragmentOfExposedFields(builder, params);
        // The rate limit is persisted but not part of the exposed (filtered) fields.
        rateLimitSettings.toXContent(builder, params);

        builder.endObject();
        return builder;
    }

    @Override
    protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, ToXContent.Params params) throws IOException {
        builder.field(RESOURCE_NAME, resourceName);
        builder.field(DEPLOYMENT_ID, deploymentId);
        builder.field(API_VERSION, apiVersion);

        return builder;
    }

    @Override
    public TransportVersion getMinimalSupportedVersion() {
        return TransportVersions.ML_INFERENCE_AZURE_OPENAI_COMPLETIONS;
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(resourceName);
        out.writeString(deploymentId);
        out.writeString(apiVersion);
        rateLimitSettings.writeTo(out);
    }

    @Override
    public boolean equals(Object object) {
        if (this == object) return true;
        if (object == null || getClass() != object.getClass()) return false;
        AzureOpenAiCompletionServiceSettings that = (AzureOpenAiCompletionServiceSettings) object;
        return Objects.equals(resourceName, that.resourceName)
            && Objects.equals(deploymentId, that.deploymentId)
            && Objects.equals(apiVersion, that.apiVersion)
            && Objects.equals(rateLimitSettings, that.rateLimitSettings);
    }

    @Override
    public int hashCode() {
        return Objects.hash(resourceName, deploymentId, apiVersion, rateLimitSettings);
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

package org.elasticsearch.xpack.inference.services.azureopenai.completion;

import org.elasticsearch.TransportVersion;
import org.elasticsearch.TransportVersions;
import org.elasticsearch.common.ValidationException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.inference.ModelConfigurations;
import org.elasticsearch.inference.TaskSettings;
import org.elasticsearch.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.Map;
import java.util.Objects;

import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString;

/**
 * Persisted task settings for the Azure OpenAI completion task. The only setting is
 * the optional {@code user} identifier forwarded to Azure OpenAI for abuse monitoring;
 * it may be overridden per request via {@link AzureOpenAiCompletionRequestTaskSettings}.
 */
public class AzureOpenAiCompletionTaskSettings implements TaskSettings {

    public static final String NAME = "azure_openai_completion_task_settings";

    public static final String USER = "user";

    /**
     * Parses the task settings from the raw configuration map.
     *
     * @param map the task settings section of the model configuration; consumed keys are removed
     * @return the parsed settings (the user field may be null)
     * @throws ValidationException if the {@code user} entry is present but invalid
     */
    public static AzureOpenAiCompletionTaskSettings fromMap(Map<String, Object> map) {
        var validationException = new ValidationException();

        var user = extractOptionalString(map, USER, ModelConfigurations.TASK_SETTINGS, validationException);

        if (validationException.validationErrors().isEmpty() == false) {
            throw validationException;
        }

        return new AzureOpenAiCompletionTaskSettings(user);
    }

    private final String user;

    /**
     * Merges the persisted settings with per-request overrides: a non-null request-level
     * user wins, otherwise the original user is kept.
     */
    public static AzureOpenAiCompletionTaskSettings of(
        AzureOpenAiCompletionTaskSettings originalSettings,
        AzureOpenAiCompletionRequestTaskSettings requestSettings
    ) {
        var overrideUser = requestSettings.user();
        return new AzureOpenAiCompletionTaskSettings(overrideUser != null ? overrideUser : originalSettings.user);
    }

    public AzureOpenAiCompletionTaskSettings(@Nullable String user) {
        this.user = user;
    }

    public AzureOpenAiCompletionTaskSettings(StreamInput in) throws IOException {
        this.user = in.readOptionalString();
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        // A null user is simply omitted from the rendered object.
        if (user != null) {
            builder.field(USER, user);
        }
        return builder.endObject();
    }

    public String user() {
        return user;
    }

    @Override
    public String getWriteableName() {
        return NAME;
    }

    @Override
    public TransportVersion getMinimalSupportedVersion() {
        return TransportVersions.ML_INFERENCE_AZURE_OPENAI_COMPLETIONS;
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeOptionalString(user);
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        if (other == null || getClass() != other.getClass()) {
            return false;
        }
        var that = (AzureOpenAiCompletionTaskSettings) other;
        return Objects.equals(user, that.user);
    }

    @Override
    public int hashCode() {
        return Objects.hash(user);
    }
}
org.elasticsearch.xpack.inference.services.azureopenai.embeddings; -import org.apache.http.client.utils.URIBuilder; import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; @@ -19,12 +18,9 @@ import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiModel; import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; -import java.net.URI; import java.net.URISyntaxException; import java.util.Map; -import static org.elasticsearch.core.Strings.format; - public class AzureOpenAiEmbeddingsModel extends AzureOpenAiModel { public static AzureOpenAiEmbeddingsModel of(AzureOpenAiEmbeddingsModel model, Map taskSettings) { @@ -70,7 +66,7 @@ public AzureOpenAiEmbeddingsModel( serviceSettings ); try { - this.uri = getEmbeddingsUri(serviceSettings.resourceName(), serviceSettings.deploymentId(), serviceSettings.apiVersion()); + this.uri = buildUriString(); } catch (URISyntaxException e) { throw new RuntimeException(e); } @@ -104,17 +100,24 @@ public ExecutableAction accept(AzureOpenAiActionVisitor creator, Map { - builder.startObject(); - - toXContentFragmentOfExposedFields(builder, params); - - builder.endObject(); - return builder; - }; + return builder; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java index 33136c339e757..d55615e9df48a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java @@ -18,9 +18,9 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import 
org.elasticsearch.inference.SimilarityMeasure; -import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; +import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import java.io.IOException; @@ -38,7 +38,7 @@ import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractSimilarity; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeAsType; -public class CohereServiceSettings implements ServiceSettings, CohereRateLimitServiceSettings { +public class CohereServiceSettings extends FilteredXContentObject implements ServiceSettings, CohereRateLimitServiceSettings { public static final String NAME = "cohere_service_settings"; public static final String OLD_MODEL_ID_FIELD = "model"; @@ -173,6 +173,14 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } public XContentBuilder toXContentFragment(XContentBuilder builder, Params params) throws IOException { + toXContentFragmentOfExposedFields(builder, params); + rateLimitSettings.toXContent(builder, params); + + return builder; + } + + @Override + public XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { if (uri != null) { builder.field(URL, uri.toString()); } @@ -188,19 +196,13 @@ public XContentBuilder toXContentFragment(XContentBuilder builder, Params params if (modelId != null) { builder.field(MODEL_ID, modelId); } - rateLimitSettings.toXContent(builder, params); return builder; } - @Override - public ToXContentObject getFilteredXContentObject() { - return this; - } - @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_COHERE_EMBEDDINGS_ADDED; + return TransportVersions.V_8_13_0; } @Override diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java index 7d78091a20106..685dac0f3877c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java @@ -16,11 +16,11 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.inference.SimilarityMeasure; -import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettings; +import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import java.io.IOException; import java.util.EnumSet; @@ -30,7 +30,7 @@ import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalEnum; -public class CohereEmbeddingsServiceSettings implements ServiceSettings { +public class CohereEmbeddingsServiceSettings extends FilteredXContentObject implements ServiceSettings { public static final String NAME = "cohere_embeddings_service_settings"; static final String EMBEDDING_TYPE = "embedding_type"; @@ -160,13 +160,16 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } @Override - public ToXContentObject getFilteredXContentObject() { - return this; + protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { + 
commonSettings.toXContentFragmentOfExposedFields(builder, params); + builder.field(EMBEDDING_TYPE, elementType()); + + return builder; } @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_COHERE_EMBEDDINGS_ADDED; + return TransportVersions.V_8_13_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettings.java index 134cb29862e64..0a42df8c0bb41 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettings.java @@ -174,7 +174,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_COHERE_EMBEDDINGS_ADDED; + return TransportVersions.V_8_13_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettings.java index 19538be3734ba..6a74fe533e3db 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettings.java @@ -13,16 +13,16 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.inference.ServiceSettings; -import org.elasticsearch.xcontent.ToXContentObject; import 
org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettings; +import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import java.io.IOException; import java.util.Map; import java.util.Objects; -public class CohereRerankServiceSettings implements ServiceSettings { +public class CohereRerankServiceSettings extends FilteredXContentObject implements ServiceSettings { public static final String NAME = "cohere_rerank_service_settings"; public static CohereRerankServiceSettings fromMap(Map map, ConfigurationParseContext parseContext) { @@ -62,8 +62,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } @Override - public ToXContentObject getFilteredXContentObject() { - return this; + protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { + commonSettings.toXContentFragmentOfExposedFields(builder, params); + + return builder; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java index 86ac5bbaaa272..ba98090c92522 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.inference.services.elasticsearch; import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; 
import org.elasticsearch.common.io.stream.StreamOutput; @@ -17,8 +18,6 @@ import java.io.IOException; import java.util.Map; -import static org.elasticsearch.TransportVersions.ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED; - public class CustomElandInternalServiceSettings extends ElasticsearchInternalServiceSettings { public static final String NAME = "custom_eland_model_internal_service_settings"; @@ -86,7 +85,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED; + return TransportVersions.V_8_13_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java index f6458b48f99fc..a384dfe9a2c90 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java @@ -8,13 +8,12 @@ package org.elasticsearch.xpack.inference.services.elasticsearch; import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.xpack.inference.services.settings.InternalServiceSettings; import java.io.IOException; -import static org.elasticsearch.TransportVersions.ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED; - public class ElasticsearchInternalServiceSettings extends InternalServiceSettings { public static final String NAME = "text_embedding_internal_service_settings"; @@ -34,7 +33,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED; 
+ return TransportVersions.V_8_13_0; } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java index 3347917bab2b5..d514ca6a917d4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference.services.elasticsearch; -import org.elasticsearch.TransportVersion; import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -21,8 +20,6 @@ import java.util.Arrays; import java.util.Map; -import static org.elasticsearch.TransportVersions.ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED; - public class MultilingualE5SmallInternalServiceSettings extends ElasticsearchInternalServiceSettings { public static final String NAME = "multilingual_e5_small_service_settings"; @@ -104,11 +101,6 @@ public String getWriteableName() { return MultilingualE5SmallInternalServiceSettings.NAME; } - @Override - public TransportVersion getMinimalSupportedVersion() { - return ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED; - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java index beb9035640024..af2c433663ac4 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java @@ -17,8 +17,8 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.inference.SimilarityMeasure; -import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import java.io.IOException; @@ -36,7 +36,7 @@ import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractSimilarity; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeAsType; -public class HuggingFaceServiceSettings implements ServiceSettings, HuggingFaceRateLimitServiceSettings { +public class HuggingFaceServiceSettings extends FilteredXContentObject implements ServiceSettings, HuggingFaceRateLimitServiceSettings { public static final String NAME = "hugging_face_service_settings"; // At the time of writing HuggingFace hasn't posted the default rate limit for inference endpoints so the value here is only a guess @@ -118,6 +118,14 @@ public HuggingFaceServiceSettings(StreamInput in) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); + toXContentFragmentOfExposedFields(builder, params); + rateLimitSettings.toXContent(builder, params); + builder.endObject(); + return builder; + } + + @Override + protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { builder.field(URL, uri.toString()); if (similarity != null) { builder.field(SIMILARITY, similarity); @@ -128,14 +136,8 @@ 
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (maxInputTokens != null) { builder.field(MAX_INPUT_TOKENS, maxInputTokens); } - rateLimitSettings.toXContent(builder, params); - builder.endObject(); - return builder; - } - @Override - public ToXContentObject getFilteredXContentObject() { - return this; + return builder; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java index a48ccd14fdb66..1f337de450ef9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java @@ -14,9 +14,9 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ServiceSettings; -import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceRateLimitServiceSettings; +import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import java.io.IOException; @@ -28,7 +28,10 @@ import static org.elasticsearch.xpack.inference.services.ServiceUtils.createUri; import static org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceServiceSettings.extractUri; -public class HuggingFaceElserServiceSettings implements ServiceSettings, HuggingFaceRateLimitServiceSettings { +public class HuggingFaceElserServiceSettings extends FilteredXContentObject + implements + ServiceSettings, + 
HuggingFaceRateLimitServiceSettings { public static final String NAME = "hugging_face_elser_service_settings"; static final String URL = "url"; @@ -56,7 +59,8 @@ public HuggingFaceElserServiceSettings(String url) { rateLimitSettings = DEFAULT_RATE_LIMIT_SETTINGS; } - private HuggingFaceElserServiceSettings(URI uri, @Nullable RateLimitSettings rateLimitSettings) { + // default for testing + HuggingFaceElserServiceSettings(URI uri, @Nullable RateLimitSettings rateLimitSettings) { this.uri = Objects.requireNonNull(uri); this.rateLimitSettings = Objects.requireNonNullElse(rateLimitSettings, DEFAULT_RATE_LIMIT_SETTINGS); } @@ -88,8 +92,7 @@ public int maxInputTokens() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(URL, uri.toString()); - builder.field(MAX_INPUT_TOKENS, ELSER_TOKEN_LIMIT); + toXContentFragmentOfExposedFields(builder, params); rateLimitSettings.toXContent(builder, params); builder.endObject(); @@ -97,8 +100,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } @Override - public ToXContentObject getFilteredXContentObject() { - return this; + protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { + builder.field(URL, uri.toString()); + builder.field(MAX_INPUT_TOKENS, ELSER_TOKEN_LIMIT); + + return builder; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java index 7703476a14dea..5105bb59e048f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java @@ -15,9 +15,9 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; -import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.inference.services.openai.OpenAiRateLimitServiceSettings; +import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import java.io.IOException; @@ -38,7 +38,7 @@ /** * Defines the service settings for interacting with OpenAI's chat completion models. */ -public class OpenAiChatCompletionServiceSettings implements ServiceSettings, OpenAiRateLimitServiceSettings { +public class OpenAiChatCompletionServiceSettings extends FilteredXContentObject implements ServiceSettings, OpenAiRateLimitServiceSettings { public static final String NAME = "openai_completion_service_settings"; @@ -141,24 +141,29 @@ public Integer maxInputTokens() { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - { - builder.field(MODEL_ID, modelId); + toXContentFragmentOfExposedFields(builder, params); + rateLimitSettings.toXContent(builder, params); - if (uri != null) { - builder.field(URL, uri.toString()); - } + builder.endObject(); + return builder; + } - if (organizationId != null) { - builder.field(ORGANIZATION, organizationId); - } + @Override + protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { + builder.field(MODEL_ID, modelId); - if (maxInputTokens != null) { - builder.field(MAX_INPUT_TOKENS, maxInputTokens); - } + if (uri != null) { + builder.field(URL, uri.toString()); + } + + if (organizationId != null) { + 
builder.field(ORGANIZATION, organizationId); + } + + if (maxInputTokens != null) { + builder.field(MAX_INPUT_TOKENS, maxInputTokens); } - rateLimitSettings.toXContent(builder, params); - builder.endObject(); return builder; } @@ -184,11 +189,6 @@ public void writeTo(StreamOutput out) throws IOException { } } - @Override - public ToXContentObject getFilteredXContentObject() { - return this; - } - @Override public boolean equals(Object object) { if (this == object) return true; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettings.java index 373704af37fcd..b3b94f7584563 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettings.java @@ -7,8 +7,6 @@ package org.elasticsearch.xpack.inference.services.openai.embeddings; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.common.ValidationException; import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ModelConfigurations; @@ -25,7 +23,6 @@ * @param user a unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse */ public record OpenAiEmbeddingsRequestTaskSettings(@Nullable String user) { - private static final Logger logger = LogManager.getLogger(OpenAiEmbeddingsRequestTaskSettings.class); public static final OpenAiEmbeddingsRequestTaskSettings EMPTY_SETTINGS = new OpenAiEmbeddingsRequestTaskSettings(null); diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java index 8edbb7bc14f2c..fc479009d3334 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java @@ -17,10 +17,10 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.inference.SimilarityMeasure; -import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.openai.OpenAiRateLimitServiceSettings; +import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import java.io.IOException; @@ -44,7 +44,7 @@ /** * Defines the service settings for interacting with OpenAI's text embedding models. 
*/ -public class OpenAiEmbeddingsServiceSettings implements ServiceSettings, OpenAiRateLimitServiceSettings { +public class OpenAiEmbeddingsServiceSettings extends FilteredXContentObject implements ServiceSettings, OpenAiRateLimitServiceSettings { public static final String NAME = "openai_service_settings"; @@ -177,14 +177,11 @@ public OpenAiEmbeddingsServiceSettings(StreamInput in) throws IOException { maxInputTokens = null; } - if (in.getTransportVersion().onOrAfter(TransportVersions.ML_DIMENSIONS_SET_BY_USER_ADDED)) { - dimensionsSetByUser = in.readBoolean(); - } else { - dimensionsSetByUser = false; - } if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { + dimensionsSetByUser = in.readBoolean(); modelId = in.readString(); } else { + dimensionsSetByUser = false; modelId = "unset"; } @@ -261,6 +258,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); toXContentFragmentOfExposedFields(builder, params); + rateLimitSettings.toXContent(builder, params); if (dimensionsSetByUser != null) { builder.field(DIMENSIONS_SET_BY_USER, dimensionsSetByUser); @@ -270,7 +268,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - private void toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { + @Override + protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { builder.field(MODEL_ID, modelId); if (uri != null) { builder.field(URL, uri.toString()); @@ -287,19 +286,8 @@ private void toXContentFragmentOfExposedFields(XContentBuilder builder, Params p if (maxInputTokens != null) { builder.field(MAX_INPUT_TOKENS, maxInputTokens); } - rateLimitSettings.toXContent(builder, params); - } - - @Override - public ToXContentObject getFilteredXContentObject() { - return (builder, params) -> { - builder.startObject(); - toXContentFragmentOfExposedFields(builder, 
params); - - builder.endObject(); - return builder; - }; + return builder; } @Override @@ -319,10 +307,8 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalVInt(maxInputTokens); } - if (out.getTransportVersion().onOrAfter(TransportVersions.ML_DIMENSIONS_SET_BY_USER_ADDED)) { - out.writeBoolean(dimensionsSetByUser); - } if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { + out.writeBoolean(dimensionsSetByUser); out.writeString(modelId); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/FilteredXContentObject.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/FilteredXContentObject.java new file mode 100644 index 0000000000000..655e50e073972 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/FilteredXContentObject.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.settings; + +import org.elasticsearch.inference.FilteredXContent; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; + +public abstract class FilteredXContentObject implements FilteredXContent { + @Override + public ToXContentObject getFilteredXContentObject() { + return (builder, params) -> { + builder.startObject(); + + toXContentFragmentOfExposedFields(builder, params); + + builder.endObject(); + return builder; + }; + } + + protected abstract XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, ToXContent.Params params) + throws IOException; +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java new file mode 100644 index 0000000000000..c87faa2b52cc8 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java @@ -0,0 +1,386 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.action.filter; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.bulk.BulkItemRequest; +import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.action.bulk.BulkShardRequest; +import org.elasticsearch.action.bulk.TransportShardBulkAction; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.ActionFilterChain; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.cluster.metadata.InferenceFieldMetadata; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.inference.ChunkedInferenceServiceResults; +import org.elasticsearch.inference.InferenceService; +import org.elasticsearch.inference.InferenceServiceRegistry; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.core.inference.results.ChunkedSparseEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; +import org.elasticsearch.xpack.inference.model.TestModel; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; +import org.junit.After; +import org.junit.Before; +import org.mockito.stubbing.Answer; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import 
java.util.Optional; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.awaitLatch; +import static org.elasticsearch.xpack.inference.action.filter.ShardBulkInferenceActionFilter.DEFAULT_BATCH_SIZE; +import static org.elasticsearch.xpack.inference.action.filter.ShardBulkInferenceActionFilter.getIndexRequestOrNull; +import static org.elasticsearch.xpack.inference.mapper.SemanticTextFieldTests.randomSemanticText; +import static org.elasticsearch.xpack.inference.mapper.SemanticTextFieldTests.randomSparseEmbeddings; +import static org.elasticsearch.xpack.inference.mapper.SemanticTextFieldTests.toChunkedResult; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class ShardBulkInferenceActionFilterTests extends ESTestCase { + private ThreadPool threadPool; + + @Before + public void setupThreadPool() { + threadPool = new TestThreadPool(getTestName()); + } + + @After + public void tearDownThreadPool() throws Exception { + terminate(threadPool); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + public void testFilterNoop() throws Exception { + ShardBulkInferenceActionFilter filter = createFilter(threadPool, Map.of(), DEFAULT_BATCH_SIZE); + CountDownLatch chainExecuted = new CountDownLatch(1); + ActionFilterChain actionFilterChain = (task, action, request, listener) -> { + try { + assertNull(((BulkShardRequest) request).getInferenceFieldMap()); + } finally { + chainExecuted.countDown(); + } + }; + ActionListener actionListener = mock(ActionListener.class); + Task task = mock(Task.class); + 
BulkShardRequest request = new BulkShardRequest( + new ShardId("test", "test", 0), + WriteRequest.RefreshPolicy.NONE, + new BulkItemRequest[0] + ); + request.setInferenceFieldMap( + Map.of("foo", new InferenceFieldMetadata("foo", "bar", generateRandomStringArray(5, 10, false, false))) + ); + filter.apply(task, TransportShardBulkAction.ACTION_NAME, request, actionListener, actionFilterChain); + awaitLatch(chainExecuted, 10, TimeUnit.SECONDS); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + public void testInferenceNotFound() throws Exception { + StaticModel model = StaticModel.createRandomInstance(); + ShardBulkInferenceActionFilter filter = createFilter( + threadPool, + Map.of(model.getInferenceEntityId(), model), + randomIntBetween(1, 10) + ); + CountDownLatch chainExecuted = new CountDownLatch(1); + ActionFilterChain actionFilterChain = (task, action, request, listener) -> { + try { + BulkShardRequest bulkShardRequest = (BulkShardRequest) request; + assertNull(bulkShardRequest.getInferenceFieldMap()); + for (BulkItemRequest item : bulkShardRequest.items()) { + assertNotNull(item.getPrimaryResponse()); + assertTrue(item.getPrimaryResponse().isFailed()); + BulkItemResponse.Failure failure = item.getPrimaryResponse().getFailure(); + assertThat(failure.getStatus(), equalTo(RestStatus.NOT_FOUND)); + } + } finally { + chainExecuted.countDown(); + } + }; + ActionListener actionListener = mock(ActionListener.class); + Task task = mock(Task.class); + + Map inferenceFieldMap = Map.of( + "field1", + new InferenceFieldMetadata("field1", model.getInferenceEntityId(), new String[] { "field1" }), + "field2", + new InferenceFieldMetadata("field2", "inference_0", new String[] { "field2" }), + "field3", + new InferenceFieldMetadata("field3", "inference_0", new String[] { "field3" }) + ); + BulkItemRequest[] items = new BulkItemRequest[10]; + for (int i = 0; i < items.length; i++) { + items[i] = randomBulkItemRequest(Map.of(), inferenceFieldMap)[0]; + } + BulkShardRequest 
request = new BulkShardRequest(new ShardId("test", "test", 0), WriteRequest.RefreshPolicy.NONE, items); + request.setInferenceFieldMap(inferenceFieldMap); + filter.apply(task, TransportShardBulkAction.ACTION_NAME, request, actionListener, actionFilterChain); + awaitLatch(chainExecuted, 10, TimeUnit.SECONDS); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + public void testItemFailures() throws Exception { + StaticModel model = StaticModel.createRandomInstance(); + ShardBulkInferenceActionFilter filter = createFilter( + threadPool, + Map.of(model.getInferenceEntityId(), model), + randomIntBetween(1, 10) + ); + model.putResult("I am a failure", new ErrorChunkedInferenceResults(new IllegalArgumentException("boom"))); + model.putResult("I am a success", randomSparseEmbeddings(List.of("I am a success"))); + CountDownLatch chainExecuted = new CountDownLatch(1); + ActionFilterChain actionFilterChain = (task, action, request, listener) -> { + try { + BulkShardRequest bulkShardRequest = (BulkShardRequest) request; + assertNull(bulkShardRequest.getInferenceFieldMap()); + assertThat(bulkShardRequest.items().length, equalTo(3)); + + // item 0 is a failure + assertNotNull(bulkShardRequest.items()[0].getPrimaryResponse()); + assertTrue(bulkShardRequest.items()[0].getPrimaryResponse().isFailed()); + BulkItemResponse.Failure failure = bulkShardRequest.items()[0].getPrimaryResponse().getFailure(); + assertThat(failure.getCause().getCause().getMessage(), containsString("boom")); + + // item 1 is a success + assertNull(bulkShardRequest.items()[1].getPrimaryResponse()); + IndexRequest actualRequest = getIndexRequestOrNull(bulkShardRequest.items()[1].request()); + assertThat(XContentMapValues.extractValue("field1.text", actualRequest.sourceAsMap()), equalTo("I am a success")); + + // item 2 is a failure + assertNotNull(bulkShardRequest.items()[2].getPrimaryResponse()); + assertTrue(bulkShardRequest.items()[2].getPrimaryResponse().isFailed()); + failure = 
bulkShardRequest.items()[2].getPrimaryResponse().getFailure(); + assertThat(failure.getCause().getCause().getMessage(), containsString("boom")); + } finally { + chainExecuted.countDown(); + } + }; + ActionListener actionListener = mock(ActionListener.class); + Task task = mock(Task.class); + + Map inferenceFieldMap = Map.of( + "field1", + new InferenceFieldMetadata("field1", model.getInferenceEntityId(), new String[] { "field1" }) + ); + BulkItemRequest[] items = new BulkItemRequest[3]; + items[0] = new BulkItemRequest(0, new IndexRequest("index").source("field1", "I am a failure")); + items[1] = new BulkItemRequest(1, new IndexRequest("index").source("field1", "I am a success")); + items[2] = new BulkItemRequest(2, new IndexRequest("index").source("field1", "I am a failure")); + BulkShardRequest request = new BulkShardRequest(new ShardId("test", "test", 0), WriteRequest.RefreshPolicy.NONE, items); + request.setInferenceFieldMap(inferenceFieldMap); + filter.apply(task, TransportShardBulkAction.ACTION_NAME, request, actionListener, actionFilterChain); + awaitLatch(chainExecuted, 10, TimeUnit.SECONDS); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + public void testManyRandomDocs() throws Exception { + Map inferenceModelMap = new HashMap<>(); + int numModels = randomIntBetween(1, 5); + for (int i = 0; i < numModels; i++) { + StaticModel model = StaticModel.createRandomInstance(); + inferenceModelMap.put(model.getInferenceEntityId(), model); + } + + int numInferenceFields = randomIntBetween(1, 5); + Map inferenceFieldMap = new HashMap<>(); + for (int i = 0; i < numInferenceFields; i++) { + String field = randomAlphaOfLengthBetween(5, 10); + String inferenceId = randomFrom(inferenceModelMap.keySet()); + inferenceFieldMap.put(field, new InferenceFieldMetadata(field, inferenceId, new String[] { field })); + } + + int numRequests = randomIntBetween(100, 1000); + BulkItemRequest[] originalRequests = new BulkItemRequest[numRequests]; + BulkItemRequest[] 
modifiedRequests = new BulkItemRequest[numRequests]; + for (int id = 0; id < numRequests; id++) { + BulkItemRequest[] res = randomBulkItemRequest(inferenceModelMap, inferenceFieldMap); + originalRequests[id] = res[0]; + modifiedRequests[id] = res[1]; + } + + ShardBulkInferenceActionFilter filter = createFilter(threadPool, inferenceModelMap, randomIntBetween(10, 30)); + CountDownLatch chainExecuted = new CountDownLatch(1); + ActionFilterChain actionFilterChain = (task, action, request, listener) -> { + try { + assertThat(request, instanceOf(BulkShardRequest.class)); + BulkShardRequest bulkShardRequest = (BulkShardRequest) request; + assertNull(bulkShardRequest.getInferenceFieldMap()); + BulkItemRequest[] items = bulkShardRequest.items(); + assertThat(items.length, equalTo(originalRequests.length)); + for (int id = 0; id < items.length; id++) { + IndexRequest actualRequest = getIndexRequestOrNull(items[id].request()); + IndexRequest expectedRequest = getIndexRequestOrNull(modifiedRequests[id].request()); + try { + assertToXContentEquivalent(expectedRequest.source(), actualRequest.source(), expectedRequest.getContentType()); + } catch (Exception exc) { + throw new IllegalStateException(exc); + } + } + } finally { + chainExecuted.countDown(); + } + }; + ActionListener actionListener = mock(ActionListener.class); + Task task = mock(Task.class); + BulkShardRequest original = new BulkShardRequest(new ShardId("test", "test", 0), WriteRequest.RefreshPolicy.NONE, originalRequests); + original.setInferenceFieldMap(inferenceFieldMap); + filter.apply(task, TransportShardBulkAction.ACTION_NAME, original, actionListener, actionFilterChain); + awaitLatch(chainExecuted, 10, TimeUnit.SECONDS); + } + + @SuppressWarnings("unchecked") + private static ShardBulkInferenceActionFilter createFilter(ThreadPool threadPool, Map modelMap, int batchSize) { + ModelRegistry modelRegistry = mock(ModelRegistry.class); + Answer unparsedModelAnswer = invocationOnMock -> { + String id = (String) 
invocationOnMock.getArguments()[0]; + ActionListener listener = (ActionListener) invocationOnMock + .getArguments()[1]; + var model = modelMap.get(id); + if (model != null) { + listener.onResponse( + new ModelRegistry.UnparsedModel( + model.getInferenceEntityId(), + model.getTaskType(), + model.getServiceSettings().model(), + XContentHelper.convertToMap(JsonXContent.jsonXContent, Strings.toString(model.getTaskSettings()), false), + XContentHelper.convertToMap(JsonXContent.jsonXContent, Strings.toString(model.getSecretSettings()), false) + ) + ); + } else { + listener.onFailure(new ResourceNotFoundException("model id [{}] not found", id)); + } + return null; + }; + doAnswer(unparsedModelAnswer).when(modelRegistry).getModelWithSecrets(any(), any()); + + InferenceService inferenceService = mock(InferenceService.class); + Answer chunkedInferAnswer = invocationOnMock -> { + StaticModel model = (StaticModel) invocationOnMock.getArguments()[0]; + List inputs = (List) invocationOnMock.getArguments()[2]; + ActionListener> listener = (ActionListener< + List>) invocationOnMock.getArguments()[7]; + Runnable runnable = () -> { + List results = new ArrayList<>(); + for (String input : inputs) { + results.add(model.getResults(input)); + } + listener.onResponse(results); + }; + if (randomBoolean()) { + try { + threadPool.generic().execute(runnable); + } catch (Exception exc) { + listener.onFailure(exc); + } + } else { + runnable.run(); + } + return null; + }; + doAnswer(chunkedInferAnswer).when(inferenceService).chunkedInfer(any(), any(), any(), any(), any(), any(), any(), any()); + + Answer modelAnswer = invocationOnMock -> { + String inferenceId = (String) invocationOnMock.getArguments()[0]; + return modelMap.get(inferenceId); + }; + doAnswer(modelAnswer).when(inferenceService).parsePersistedConfigWithSecrets(any(), any(), any(), any()); + + InferenceServiceRegistry inferenceServiceRegistry = mock(InferenceServiceRegistry.class); + 
when(inferenceServiceRegistry.getService(any())).thenReturn(Optional.of(inferenceService)); + ShardBulkInferenceActionFilter filter = new ShardBulkInferenceActionFilter(inferenceServiceRegistry, modelRegistry, batchSize); + return filter; + } + + private static BulkItemRequest[] randomBulkItemRequest( + Map modelMap, + Map fieldInferenceMap + ) { + Map docMap = new LinkedHashMap<>(); + Map expectedDocMap = new LinkedHashMap<>(); + XContentType requestContentType = randomFrom(XContentType.values()); + for (var entry : fieldInferenceMap.values()) { + String field = entry.getName(); + var model = modelMap.get(entry.getInferenceId()); + String text = randomAlphaOfLengthBetween(10, 100); + docMap.put(field, text); + expectedDocMap.put(field, text); + if (model == null) { + // ignore results, the doc should fail with a resource not found exception + continue; + } + var result = randomSemanticText(field, model, List.of(text), requestContentType); + model.putResult(text, toChunkedResult(result)); + expectedDocMap.put(field, result); + } + + int requestId = randomIntBetween(0, Integer.MAX_VALUE); + return new BulkItemRequest[] { + new BulkItemRequest(requestId, new IndexRequest("index").source(docMap, requestContentType)), + new BulkItemRequest(requestId, new IndexRequest("index").source(expectedDocMap, requestContentType)) }; + } + + private static class StaticModel extends TestModel { + private final Map resultMap; + + StaticModel( + String inferenceEntityId, + TaskType taskType, + String service, + TestServiceSettings serviceSettings, + TestTaskSettings taskSettings, + TestSecretSettings secretSettings + ) { + super(inferenceEntityId, taskType, service, serviceSettings, taskSettings, secretSettings); + this.resultMap = new HashMap<>(); + } + + public static StaticModel createRandomInstance() { + TestModel testModel = TestModel.createRandomInstance(); + return new StaticModel( + testModel.getInferenceEntityId(), + testModel.getTaskType(), + randomAlphaOfLength(10), + 
testModel.getServiceSettings(), + testModel.getTaskSettings(), + testModel.getSecretSettings() + ); + } + + ChunkedInferenceServiceResults getResults(String text) { + return resultMap.getOrDefault(text, new ChunkedSparseEmbeddingResults(List.of())); + } + + void putResult(String text, ChunkedInferenceServiceResults result) { + resultMap.put(text, result); + } + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreatorTests.java index 4bdba67beec17..567e26101283e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreatorTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; @@ -45,8 +46,9 @@ import static org.elasticsearch.xpack.inference.external.http.retry.RetrySettingsTests.buildSettingsWithRetryFields; import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectation; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; +import static org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModelTests.createCompletionModel; import static 
org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModelTests.createModel; -import static org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsRequestTaskSettingsTests.getRequestTaskSettingsMap; +import static org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsRequestTaskSettingsTests.createRequestTaskSettingsMap; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; @@ -54,6 +56,11 @@ public class AzureOpenAiActionCreatorTests extends ESTestCase { private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + private static final Settings ZERO_TIMEOUT_SETTINGS = buildSettingsWithRetryFields( + TimeValue.timeValueMillis(1), + TimeValue.timeValueMinutes(1), + TimeValue.timeValueSeconds(0) + ); private final MockWebServer webServer = new MockWebServer(); private ThreadPool threadPool; private HttpClientManager clientManager; @@ -103,7 +110,7 @@ public void testCreate_AzureOpenAiEmbeddingsModel() throws IOException { var model = createModel("resource", "deployment", "apiversion", "orig_user", "apikey", null, "id"); model.setUri(new URI(getUrl(webServer))); var actionCreator = new AzureOpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap("overridden_user"); + var overriddenTaskSettings = createRequestTaskSettingsMap("overridden_user"); var action = (AzureOpenAiEmbeddingsAction) actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -116,7 +123,7 @@ public void testCreate_AzureOpenAiEmbeddingsModel() throws IOException { validateRequestWithApiKey(webServer.requests().get(0), "apikey"); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - validateRequestMapWithUser(requestMap, List.of("abc"), "overridden_user"); + 
validateEmbeddingsRequestMapWithUser(requestMap, List.of("abc"), "overridden_user"); } catch (URISyntaxException e) { throw new RuntimeException(e); } @@ -153,7 +160,7 @@ public void testCreate_AzureOpenAiEmbeddingsModel_WithoutUser() throws IOExcepti var model = createModel("resource", "deployment", "apiversion", null, "apikey", null, "id"); model.setUri(new URI(getUrl(webServer))); var actionCreator = new AzureOpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap(null); + var overriddenTaskSettings = createRequestTaskSettingsMap(null); var action = (AzureOpenAiEmbeddingsAction) actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -166,7 +173,7 @@ public void testCreate_AzureOpenAiEmbeddingsModel_WithoutUser() throws IOExcepti validateRequestWithApiKey(webServer.requests().get(0), "apikey"); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - validateRequestMapWithUser(requestMap, List.of("abc"), null); + validateEmbeddingsRequestMapWithUser(requestMap, List.of("abc"), null); } catch (URISyntaxException e) { throw new RuntimeException(e); } @@ -174,12 +181,7 @@ public void testCreate_AzureOpenAiEmbeddingsModel_WithoutUser() throws IOExcepti public void testCreate_AzureOpenAiEmbeddingsModel_FailsFromInvalidResponseFormat() throws IOException { // timeout as zero for no retries - var settings = buildSettingsWithRetryFields( - TimeValue.timeValueMillis(1), - TimeValue.timeValueMinutes(1), - TimeValue.timeValueSeconds(0) - ); - var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager, settings); + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager, ZERO_TIMEOUT_SETTINGS); try (var sender = senderFactory.createSender("test_service")) { sender.start(); @@ -209,7 +211,7 @@ public void testCreate_AzureOpenAiEmbeddingsModel_FailsFromInvalidResponseFormat var 
model = createModel("resource", "deployment", "apiversion", null, "apikey", null, "id"); model.setUri(new URI(getUrl(webServer))); var actionCreator = new AzureOpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap("overridden_user"); + var overriddenTaskSettings = createRequestTaskSettingsMap("overridden_user"); var action = (AzureOpenAiEmbeddingsAction) actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -226,7 +228,7 @@ public void testCreate_AzureOpenAiEmbeddingsModel_FailsFromInvalidResponseFormat validateRequestWithApiKey(webServer.requests().get(0), "apikey"); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - validateRequestMapWithUser(requestMap, List.of("abc"), "overridden_user"); + validateEmbeddingsRequestMapWithUser(requestMap, List.of("abc"), "overridden_user"); } catch (URISyntaxException e) { throw new RuntimeException(e); } @@ -281,7 +283,7 @@ public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From413StatusC var model = createModel("resource", "deployment", "apiversion", null, "apikey", null, "id"); model.setUri(new URI(getUrl(webServer))); var actionCreator = new AzureOpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap("overridden_user"); + var overriddenTaskSettings = createRequestTaskSettingsMap("overridden_user"); var action = (AzureOpenAiEmbeddingsAction) actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -295,13 +297,13 @@ public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From413StatusC validateRequestWithApiKey(webServer.requests().get(0), "apikey"); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - validateRequestMapWithUser(requestMap, List.of("abcd"), "overridden_user"); + 
validateEmbeddingsRequestMapWithUser(requestMap, List.of("abcd"), "overridden_user"); } { validateRequestWithApiKey(webServer.requests().get(1), "apikey"); var requestMap = entityAsMap(webServer.requests().get(1).getBody()); - validateRequestMapWithUser(requestMap, List.of("ab"), "overridden_user"); + validateEmbeddingsRequestMapWithUser(requestMap, List.of("ab"), "overridden_user"); } } catch (URISyntaxException e) { throw new RuntimeException(e); @@ -357,7 +359,7 @@ public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From400StatusC var model = createModel("resource", "deployment", "apiversion", null, "apikey", null, "id"); model.setUri(new URI(getUrl(webServer))); var actionCreator = new AzureOpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap("overridden_user"); + var overriddenTaskSettings = createRequestTaskSettingsMap("overridden_user"); var action = (AzureOpenAiEmbeddingsAction) actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -371,13 +373,13 @@ public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From400StatusC validateRequestWithApiKey(webServer.requests().get(0), "apikey"); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - validateRequestMapWithUser(requestMap, List.of("abcd"), "overridden_user"); + validateEmbeddingsRequestMapWithUser(requestMap, List.of("abcd"), "overridden_user"); } { validateRequestWithApiKey(webServer.requests().get(1), "apikey"); var requestMap = entityAsMap(webServer.requests().get(1).getBody()); - validateRequestMapWithUser(requestMap, List.of("ab"), "overridden_user"); + validateEmbeddingsRequestMapWithUser(requestMap, List.of("ab"), "overridden_user"); } } catch (URISyntaxException e) { throw new RuntimeException(e); @@ -416,7 +418,7 @@ public void testExecute_TruncatesInputBeforeSending() throws IOException { var model = createModel("resource", 
"deployment", "apiversion", null, false, 1, null, null, "apikey", null, "id"); model.setUri(new URI(getUrl(webServer))); var actionCreator = new AzureOpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap("overridden_user"); + var overriddenTaskSettings = createRequestTaskSettingsMap("overridden_user"); var action = (AzureOpenAiEmbeddingsAction) actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -429,13 +431,186 @@ public void testExecute_TruncatesInputBeforeSending() throws IOException { validateRequestWithApiKey(webServer.requests().get(0), "apikey"); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - validateRequestMapWithUser(requestMap, List.of("sup"), "overridden_user"); + validateEmbeddingsRequestMapWithUser(requestMap, List.of("sup"), "overridden_user"); } catch (URISyntaxException e) { throw new RuntimeException(e); } } - private void validateRequestMapWithUser(Map requestMap, List input, @Nullable String user) { + public void testInfer_AzureOpenAiCompletion_WithOverriddenUser() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + { + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "response", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion" + }"""; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var originalUser = "original_user"; + var overriddenUser = "overridden_user"; + var apiKey = "api_key"; + var completionInput = "some input"; + + var model = createCompletionModel("resource", "deployment", "apiversion", originalUser, apiKey, null, "id"); + model.setUri(new URI(getUrl(webServer))); + var 
actionCreator = new AzureOpenAiActionCreator(sender, createWithEmptySettings(threadPool)); + var taskSettingsWithUserOverride = createRequestTaskSettingsMap(overriddenUser); + var action = (AzureOpenAiCompletionAction) actionCreator.create(model, taskSettingsWithUserOverride); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of(completionInput)), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var result = listener.actionGet(TIMEOUT); + + assertThat(webServer.requests(), hasSize(1)); + + var request = webServer.requests().get(0); + var requestMap = entityAsMap(request.getBody()); + + assertThat( + result.asMap(), + is(Map.of(ChatCompletionResults.COMPLETION, List.of(Map.of(ChatCompletionResults.Result.RESULT, "response")))) + ); + validateRequestWithApiKey(request, apiKey); + validateCompletionRequestMapWithUser(requestMap, List.of(completionInput), overriddenUser); + + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + public void testInfer_AzureOpenAiCompletionModel_WithoutUser() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + { + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "response", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion" + }"""; + + var completionInput = "some input"; + var apiKey = "api key"; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = createCompletionModel("resource", "deployment", "apiversion", null, apiKey, null, "id"); + model.setUri(new URI(getUrl(webServer))); + var actionCreator = new AzureOpenAiActionCreator(sender, createWithEmptySettings(threadPool)); + var requestTaskSettingsWithoutUser = 
createRequestTaskSettingsMap(null); + var action = (AzureOpenAiCompletionAction) actionCreator.create(model, requestTaskSettingsWithoutUser); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of(completionInput)), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var result = listener.actionGet(TIMEOUT); + + assertThat(webServer.requests(), hasSize(1)); + + var request = webServer.requests().get(0); + var requestMap = entityAsMap(request.getBody()); + + assertThat( + result.asMap(), + is(Map.of(ChatCompletionResults.COMPLETION, List.of(Map.of(ChatCompletionResults.Result.RESULT, "response")))) + ); + validateRequestWithApiKey(request, apiKey); + validateCompletionRequestMapWithUser(requestMap, List.of(completionInput), null); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + public void testInfer_AzureOpenAiCompletionModel_FailsFromInvalidResponseFormat() throws IOException { + // timeout as zero for no retries + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager, ZERO_TIMEOUT_SETTINGS); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + // "choices" missing + String responseJson = """ + { + "not_choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "response", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion" + }"""; + + var completionInput = "some input"; + var apiKey = "api key"; + var userOverride = "overridden_user"; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = createCompletionModel("resource", "deployment", "apiversion", null, apiKey, null, "id"); + model.setUri(new URI(getUrl(webServer))); + var actionCreator = new AzureOpenAiActionCreator(sender, createWithEmptySettings(threadPool)); + var requestTaskSettingsWithoutUser = 
createRequestTaskSettingsMap(userOverride); + var action = (AzureOpenAiCompletionAction) actionCreator.create(model, requestTaskSettingsWithoutUser); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of(completionInput)), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); + assertThat( + thrownException.getMessage(), + is(format("Failed to send Azure OpenAI completion request to [%s]", getUrl(webServer))) + ); + assertThat( + thrownException.getCause().getMessage(), + is("Failed to find required field [choices] in Azure OpenAI completions response") + ); + + assertThat(webServer.requests(), hasSize(1)); + validateRequestWithApiKey(webServer.requests().get(0), apiKey); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + validateCompletionRequestMapWithUser(requestMap, List.of(completionInput), userOverride); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + private void validateEmbeddingsRequestMapWithUser(Map requestMap, List input, @Nullable String user) { var expectedSize = user == null ? 1 : 2; assertThat(requestMap.size(), is(expectedSize)); @@ -446,6 +621,24 @@ private void validateRequestMapWithUser(Map requestMap, List requestMap, List input, @Nullable String user) { + assertThat("input for completions can only be of size 1", input.size(), equalTo(1)); + + var expectedSize = user == null ? 
2 : 3; + + assertThat(requestMap.size(), is(expectedSize)); + assertThat(getContentOfMessageInRequestMap(requestMap), is(input.get(0))); + + if (user != null) { + assertThat(requestMap.get("user"), is(user)); + } + } + + @SuppressWarnings("unchecked") + public static String getContentOfMessageInRequestMap(Map requestMap) { + return ((Map) ((List) requestMap.get("messages")).get(0)).get("content").toString(); + } + private void validateRequestWithApiKey(MockRequest request, String apiKey) { assertNull(request.getUri().getQuery()); assertThat(request.getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType())); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionActionTests.java new file mode 100644 index 0000000000000..96127841c17a8 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionActionTests.java @@ -0,0 +1,200 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.action.azureopenai; + +import org.apache.http.HttpHeaders; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.http.MockResponse; +import org.elasticsearch.test.http.MockWebServer; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; +import org.elasticsearch.xpack.inference.external.http.HttpClientManager; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; +import static org.elasticsearch.xpack.inference.external.action.azureopenai.AzureOpenAiActionCreatorTests.getContentOfMessageInRequestMap; +import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; 
+import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; +import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; +import static org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModelTests.createCompletionModel; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; + +public class AzureOpenAiCompletionActionTests extends ESTestCase { + + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + private final MockWebServer webServer = new MockWebServer(); + private ThreadPool threadPool; + private HttpClientManager clientManager; + + @Before + public void init() throws Exception { + webServer.start(); + threadPool = createThreadPool(inferenceUtilityPool()); + clientManager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mock(ThrottlerManager.class)); + } + + @After + public void shutdown() throws IOException { + clientManager.close(); + terminate(threadPool); + webServer.close(); + } + + public void testExecute_ReturnsSuccessfulResponse() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + { + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "response", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion" + + }"""; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var user = "user"; + var apiKey = "api_key"; + var completionInput = "some input"; + + var action = createAction("resource",
"deployment", "apiversion", user, apiKey, sender, "id"); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of(completionInput)), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var result = listener.actionGet(TIMEOUT); + + assertThat(webServer.requests(), hasSize(1)); + + var request = webServer.requests().get(0); + assertNull(request.getUri().getQuery()); + assertThat(request.getHeader(HttpHeaders.CONTENT_TYPE), is(XContentType.JSON.mediaType())); + assertThat(request.getHeader(AzureOpenAiUtils.API_KEY_HEADER), is(apiKey)); + + assertThat( + result.asMap(), + is(Map.of(ChatCompletionResults.COMPLETION, List.of(Map.of(ChatCompletionResults.Result.RESULT, "response")))) + ); + + var requestMap = entityAsMap(request.getBody()); + assertThat(requestMap.size(), is(3)); + assertThat(getContentOfMessageInRequestMap(requestMap), is(completionInput)); + assertThat(requestMap.get("user"), is(user)); + assertThat(requestMap.get("n"), is(1)); + } + } + + public void testExecute_ThrowsElasticsearchException() { + var sender = mock(Sender.class); + doThrow(new ElasticsearchException("failed")).when(sender).send(any(), any(), any(), any()); + + var action = createAction("resource", "deployment", "apiVersion", "user", "apikey", sender, "id"); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of("abc")), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is("failed")); + } + + public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled() { + var sender = mock(Sender.class); + + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onFailure(new IllegalStateException("failed")); + + return Void.TYPE; + 
}).when(sender).send(any(), any(), any(), any()); + + var action = createAction("resource", "deployment", "apiVersion", "user", "apikey", sender, "id"); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of("abc")), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is(format("Failed to send Azure OpenAI completion request to [%s]", getUrl(webServer)))); + } + + public void testExecute_ThrowsException() { + var sender = mock(Sender.class); + doThrow(new IllegalArgumentException("failed")).when(sender).send(any(), any(), any(), any()); + + var action = createAction("resource", "deployment", "apiVersion", "user", "apikey", sender, "id"); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of("abc")), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is(format("Failed to send Azure OpenAI completion request to [%s]", getUrl(webServer)))); + } + + private AzureOpenAiCompletionAction createAction( + String resourceName, + String deploymentId, + String apiVersion, + @Nullable String user, + String apiKey, + Sender sender, + String inferenceEntityId + ) { + try { + var model = createCompletionModel(resourceName, deploymentId, apiVersion, user, apiKey, null, inferenceEntityId); + model.setUri(new URI(getUrl(webServer))); + return new AzureOpenAiCompletionAction(sender, model, createWithEmptySettings(threadPool)); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java index 98eff32f72983..ff2448803d7ce 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java @@ -45,7 +45,7 @@ import static org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModelTests.createChatCompletionModel; import static org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionRequestTaskSettingsTests.getChatCompletionRequestTaskSettingsMap; import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsModelTests.createModel; -import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsRequestTaskSettingsTests.getRequestTaskSettingsMap; +import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsRequestTaskSettingsTests.createRequestTaskSettingsMap; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; @@ -101,7 +101,7 @@ public void testCreate_OpenAiEmbeddingsModel() throws IOException { var model = createModel(getUrl(webServer), "org", "secret", "model", "user"); var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap("overridden_user"); + var overriddenTaskSettings = createRequestTaskSettingsMap("overridden_user"); var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -154,7 +154,7 @@ public void testCreate_OpenAiEmbeddingsModel_WithoutUser() throws IOException { var model = createModel(getUrl(webServer), "org", "secret", "model", null); var 
actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap(null); + var overriddenTaskSettings = createRequestTaskSettingsMap(null); var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -206,7 +206,7 @@ public void testCreate_OpenAiEmbeddingsModel_WithoutOrganization() throws IOExce var model = createModel(getUrl(webServer), null, "secret", "model", null); var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap("overridden_user"); + var overriddenTaskSettings = createRequestTaskSettingsMap("overridden_user"); var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -265,7 +265,7 @@ public void testCreate_OpenAiEmbeddingsModel_FailsFromInvalidResponseFormat() th var model = createModel(getUrl(webServer), null, "secret", "model", null); var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap("overridden_user"); + var overriddenTaskSettings = createRequestTaskSettingsMap("overridden_user"); var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -595,7 +595,7 @@ public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From413StatusC var model = createModel(getUrl(webServer), "org", "secret", "model", "user"); var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap("overridden_user"); + var overriddenTaskSettings = createRequestTaskSettingsMap("overridden_user"); var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -678,7 +678,7 @@ 
public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From400StatusC var model = createModel(getUrl(webServer), "org", "secret", "model", "user"); var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap("overridden_user"); + var overriddenTaskSettings = createRequestTaskSettingsMap("overridden_user"); var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -746,7 +746,7 @@ public void testExecute_TruncatesInputBeforeSending() throws IOException { // truncated to 1 token = 3 characters var model = createModel(getUrl(webServer), "org", "secret", "model", "user", 1); var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap("overridden_user"); + var overriddenTaskSettings = createRequestTaskSettingsMap("overridden_user"); var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java index b802403dcd28d..e28c3e817b351 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java @@ -31,7 +31,6 @@ import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; -import org.hamcrest.CoreMatchers; import 
org.junit.After; import org.junit.Before; @@ -272,8 +271,8 @@ public void testExecute_ThrowsException_WhenInputIsGreaterThanOne() throws IOExc var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); - assertThat(thrownException.getMessage(), CoreMatchers.is("OpenAI completions only accepts 1 input")); - assertThat(thrownException.status(), CoreMatchers.is(RestStatus.BAD_REQUEST)); + assertThat(thrownException.getMessage(), is("OpenAI completions only accepts 1 input")); + assertThat(thrownException.status(), is(RestStatus.BAD_REQUEST)); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequestTests.java new file mode 100644 index 0000000000000..2d37f273e1de2 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequestTests.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.azureopenai; + +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; + +import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiRequest.MISSING_AUTHENTICATION_ERROR_MESSAGE; +import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils.API_KEY_HEADER; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings.API_KEY; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings.ENTRA_ID; +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class AzureOpenAiRequestTests extends ESTestCase { + + public void testDecorateWithAuthHeader_apiKeyPresent() { + var apiKey = randomSecureStringOfLength(10); + var httpPost = new HttpPost(); + var secretSettings = new AzureOpenAiSecretSettings(apiKey, null); + + AzureOpenAiRequest.decorateWithAuthHeader(httpPost, secretSettings); + var apiKeyHeader = httpPost.getFirstHeader(API_KEY_HEADER); + + assertThat(apiKeyHeader.getValue(), equalTo(apiKey.toString())); + } + + public void testDecorateWithAuthHeader_entraIdPresent() { + var entraId = randomSecureStringOfLength(10); + var httpPost = new HttpPost(); + var secretSettings = new AzureOpenAiSecretSettings(null, entraId); + + AzureOpenAiRequest.decorateWithAuthHeader(httpPost, secretSettings); + var authHeader = httpPost.getFirstHeader(HttpHeaders.AUTHORIZATION); + + assertThat(authHeader.getValue(), equalTo("Bearer " + entraId)); + } + + public void testDecorateWithAuthHeader_entraIdAndApiKeyMissing_throwMissingAuthValidationException() { + 
var httpPost = new HttpPost(); + var secretSettingsMock = mock(AzureOpenAiSecretSettings.class); + + when(secretSettingsMock.entraId()).thenReturn(null); + when(secretSettingsMock.apiKey()).thenReturn(null); + + ValidationException exception = expectThrows( + ValidationException.class, + () -> AzureOpenAiRequest.decorateWithAuthHeader(httpPost, secretSettingsMock) + ); + assertTrue(exception.getMessage().contains(Strings.format(MISSING_AUTHENTICATION_ERROR_MESSAGE, API_KEY, ENTRA_ID))); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestEntityTests.java new file mode 100644 index 0000000000000..7647a4983f4be --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestEntityTests.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.azureopenai.completion; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiCompletionRequestEntity; + +import java.io.IOException; +import java.util.List; + +import static org.hamcrest.CoreMatchers.is; + +public class AzureOpenAiCompletionRequestEntityTests extends ESTestCase { + + public void testXContent_WritesSingleMessage_DoesNotWriteUserWhenItIsNull() throws IOException { + var entity = new AzureOpenAiCompletionRequestEntity(List.of("input"), null); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"messages":[{"role":"user","content":"input"}],"n":1}""")); + } + + public void testXContent_WritesSingleMessage_WriteUserWhenItIsNull() throws IOException { + var entity = new AzureOpenAiCompletionRequestEntity(List.of("input"), "user"); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"messages":[{"role":"user","content":"input"}],"n":1,"user":"user"}""")); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestTests.java new file mode 100644 index 0000000000000..048d4ea16d56f --- /dev/null +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestTests.java @@ -0,0 +1,100 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.azureopenai.completion; + +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiCompletionRequest; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModelTests; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.xpack.inference.external.action.azureopenai.AzureOpenAiActionCreatorTests.getContentOfMessageInRequestMap; +import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; +import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils.API_KEY_HEADER; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; + +public class AzureOpenAiCompletionRequestTests extends ESTestCase { + + public void testCreateRequest_WithApiKeyDefined() throws IOException { + var input = "input"; + var user = "user"; + var apiKey = randomAlphaOfLength(10); + + var request = createRequest("resource", "deployment", "2024", apiKey, null, input, user); + var httpRequest = request.createHttpRequest(); + + assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); + var httpPost = (HttpPost) httpRequest.httpRequestBase(); + + assertThat( + httpPost.getURI().toString(), + 
is("https://resource.openai.azure.com/openai/deployments/deployment/chat/completions?api-version=2024") + ); + + assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); + assertThat(httpPost.getLastHeader(API_KEY_HEADER).getValue(), is(apiKey)); + + var requestMap = entityAsMap(httpPost.getEntity().getContent()); + assertThat(getContentOfMessageInRequestMap(requestMap), is(input)); + assertThat(requestMap.get("user"), is(user)); + assertThat(requestMap.get("n"), is(1)); + } + + public void testCreateRequest_WithEntraIdDefined() throws IOException { + var input = "input"; + var user = "user"; + var entraId = randomAlphaOfLength(10); + + var request = createRequest("resource", "deployment", "2024", null, entraId, input, user); + var httpRequest = request.createHttpRequest(); + + assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); + var httpPost = (HttpPost) httpRequest.httpRequestBase(); + + assertThat( + httpPost.getURI().toString(), + is("https://resource.openai.azure.com/openai/deployments/deployment/chat/completions?api-version=2024") + ); + + assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); + assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is("Bearer " + entraId)); + + var requestMap = entityAsMap(httpPost.getEntity().getContent()); + assertThat(getContentOfMessageInRequestMap(requestMap), is(input)); + assertThat(requestMap.get("user"), is(user)); + assertThat(requestMap.get("n"), is(1)); + } + + protected AzureOpenAiCompletionRequest createRequest( + String resource, + String deployment, + String apiVersion, + String apiKey, + String entraId, + String input, + String user + ) { + var completionModel = AzureOpenAiCompletionModelTests.createCompletionModel( + resource, + deployment, + apiVersion, + user, + apiKey, + entraId, + "id" + ); + + return new AzureOpenAiCompletionRequest(List.of(input), 
completionModel); + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestEntityTests.java similarity index 96% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestEntityTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestEntityTests.java index 14283ed53eed9..f732a01c893e8 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestEntityTests.java @@ -5,13 +5,14 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.inference.external.request.azureopenai; +package org.elasticsearch.xpack.inference.external.request.azureopenai.embeddings; import org.elasticsearch.common.Strings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiEmbeddingsRequestEntity; import java.io.IOException; import java.util.List; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTests.java similarity index 73% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTests.java index 88e6880b72f0b..bbd8a49d65f46 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.inference.external.request.azureopenai; +package org.elasticsearch.xpack.inference.external.request.azureopenai.embeddings; import org.apache.http.HttpHeaders; import org.apache.http.client.methods.HttpPost; @@ -14,56 +14,69 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.common.Truncator; import org.elasticsearch.xpack.inference.common.TruncatorTests; -import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModel; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiEmbeddingsRequest; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModelTests; import java.io.IOException; -import java.net.URISyntaxException; import java.util.List; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils.API_KEY_HEADER; import static org.hamcrest.Matchers.aMapWithSize; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; public class AzureOpenAiEmbeddingsRequestTests extends ESTestCase { - public void testCreateRequest_WithApiKeyDefined() throws IOException, URISyntaxException { - var request = createRequest("resource", "deployment", "apiVersion", "apikey", null, "abc", "user"); + + public void testCreateRequest_WithApiKeyDefined() throws IOException { + var input = "input"; + var user = "user"; + var apiKey = randomAlphaOfLength(10); + + var request = createRequest("resource", "deployment", "2024", apiKey, null, input, user); var httpRequest = request.createHttpRequest(); assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); var httpPost = (HttpPost) httpRequest.httpRequestBase(); - var expectedUri = AzureOpenAiEmbeddingsModel.getEmbeddingsUri("resource", "deployment", 
"apiVersion").toString(); - assertThat(httpPost.getURI().toString(), is(expectedUri)); + assertThat( + httpPost.getURI().toString(), + is("https://resource.openai.azure.com/openai/deployments/deployment/embeddings?api-version=2024") + ); assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); - assertThat(httpPost.getLastHeader(API_KEY_HEADER).getValue(), is("apikey")); + assertThat(httpPost.getLastHeader(API_KEY_HEADER).getValue(), is(apiKey)); var requestMap = entityAsMap(httpPost.getEntity().getContent()); - assertThat(requestMap, aMapWithSize(2)); - assertThat(requestMap.get("input"), is(List.of("abc"))); - assertThat(requestMap.get("user"), is("user")); + assertThat(requestMap.size(), equalTo(2)); + assertThat(requestMap.get("input"), is(List.of(input))); + assertThat(requestMap.get("user"), is(user)); } - public void testCreateRequest_WithEntraIdDefined() throws IOException, URISyntaxException { - var request = createRequest("resource", "deployment", "apiVersion", null, "entraId", "abc", "user"); + public void testCreateRequest_WithEntraIdDefined() throws IOException { + var input = "input"; + var user = "user"; + var entraId = randomAlphaOfLength(10); + + var request = createRequest("resource", "deployment", "2024", null, entraId, input, user); var httpRequest = request.createHttpRequest(); assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); var httpPost = (HttpPost) httpRequest.httpRequestBase(); - var expectedUri = AzureOpenAiEmbeddingsModel.getEmbeddingsUri("resource", "deployment", "apiVersion").toString(); - assertThat(httpPost.getURI().toString(), is(expectedUri)); + assertThat( + httpPost.getURI().toString(), + is("https://resource.openai.azure.com/openai/deployments/deployment/embeddings?api-version=2024") + ); assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); - 
assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is("Bearer entraId")); + assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is("Bearer " + entraId)); var requestMap = entityAsMap(httpPost.getEntity().getContent()); - assertThat(requestMap, aMapWithSize(2)); - assertThat(requestMap.get("input"), is(List.of("abc"))); - assertThat(requestMap.get("user"), is("user")); + assertThat(requestMap.size(), equalTo(2)); + assertThat(requestMap.get("input"), is(List.of(input))); + assertThat(requestMap.get("user"), is(user)); } public void testTruncate_ReducesInputTextSizeByHalf() throws IOException { @@ -87,7 +100,7 @@ public void testIsTruncated_ReturnsTrue() { assertTrue(truncatedRequest.getTruncationInfo()[0]); } - public static AzureOpenAiEmbeddingsRequest createRequest( + public AzureOpenAiEmbeddingsRequest createRequest( String resourceName, String deploymentId, String apiVersion, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java index 4f7cd9ea89a14..897c648eb942f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java @@ -106,6 +106,24 @@ public void testPositionParserAtTokenAfterField_ThrowsWithMalformedJSON() throws } } + public void testPositionParserAtTokenAfterField_ConsumesUntilEnd() throws IOException { + var json = """ + { + "key": { + "foo": "bar" + }, + "target": "value" + } + """; + + var errorFormat = "Error: %s"; + + try (XContentParser parser = createParser(XContentType.JSON.xContent(), json)) { + XContentUtils.positionParserAtTokenAfterField(parser, "target", errorFormat); + assertEquals("value", parser.text()); + } + } + public 
void testConsumeUntilObjectEnd() throws IOException { var json = """ { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntityTests.java new file mode 100644 index 0000000000000..ec76f43a6d52f --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntityTests.java @@ -0,0 +1,217 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.response.azureopenai; + +import org.apache.http.HttpResponse; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.request.Request; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; + +public class AzureOpenAiCompletionResponseEntityTests extends ESTestCase { + + public void testFromResponse_CreatesResultsForASingleItem() throws IOException { + String responseJson = """ + { + "choices": [ + { + "content_filter_results": { + "hate": { + "filtered": false, + "severity": "safe" + }, + "self_harm": { + "filtered": false, + "severity": "safe" + }, + "sexual": { + "filtered": false, + "severity": "safe" + }, + "violence": { + "filtered": false, + "severity": "safe" + } + }, + "finish_reason": "stop", + "index": 
0, + "logprobs": null, + "message": { + "content": "result", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion", + "prompt_filter_results": [ + { + "prompt_index": 0, + "content_filter_results": { + "hate": { + "filtered": false, + "severity": "safe" + }, + "self_harm": { + "filtered": false, + "severity": "safe" + }, + "sexual": { + "filtered": false, + "severity": "safe" + }, + "violence": { + "filtered": false, + "severity": "safe" + } + } + } + ], + "usage": { + "completion_tokens": 138, + "prompt_tokens": 11, + "total_tokens": 149 + } + }"""; + + ChatCompletionResults chatCompletionResults = AzureOpenAiCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + assertThat(chatCompletionResults.getResults().size(), is(1)); + assertThat(chatCompletionResults.getResults().get(0).content(), is("result")); + } + + public void testFromResponse_FailsWhenChoicesFieldIsNotPresent() { + String responseJson = """ + { + "not_choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "response", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion" + }"""; + + var thrownException = expectThrows( + IllegalStateException.class, + () -> AzureOpenAiCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat(thrownException.getMessage(), is("Failed to find required field [choices] in Azure OpenAI completions response")); + } + + public void testFromResponse_FailsWhenChoicesFieldIsNotAnArray() { + String responseJson = """ + { + "choices": { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "response", + "role": "assistant" + } + }, + "model": "gpt-4", + "object": "chat.completion" + ] + }"""; + + var thrownException = 
expectThrows( + ParsingException.class, + () -> AzureOpenAiCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat( + thrownException.getMessage(), + is("Failed to parse object: expecting token of type [START_OBJECT] but found [FIELD_NAME]") + ); + } + + public void testFromResponse_FailsWhenMessageDoesNotExist() { + String responseJson = """ + { + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "not_message": { + "content": "response", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion" + }"""; + + var thrownException = expectThrows( + IllegalStateException.class, + () -> AzureOpenAiCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat(thrownException.getMessage(), is("Failed to find required field [message] in Azure OpenAI completions response")); + } + + public void testFromResponse_FailsWhenMessageValueIsAString() { + String responseJson = """ + { + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": "string" + } + ], + "model": "gpt-4", + "object": "chat.completion" + ] + }"""; + + var thrownException = expectThrows( + ParsingException.class, + () -> AzureOpenAiCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat( + thrownException.getMessage(), + is("Failed to parse object: expecting token of type [START_OBJECT] but found [VALUE_STRING]") + ); + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java index 18f702014e2d8..5604d6573144e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java @@ -17,7 +17,6 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; -import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.mockito.Mockito.mock; @@ -35,7 +34,7 @@ public void testFromResponse_CreatesResultsForASingleItem() throws IOException { "index": 0, "message": { "role": "assistant", - "content": "some content" + "content": "result" }, "logprobs": null, "finish_reason": "stop" @@ -55,7 +54,8 @@ public void testFromResponse_CreatesResultsForASingleItem() throws IOException { new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); - assertThat(chatCompletionResults.getResults().size(), equalTo(1)); + assertThat(chatCompletionResults.getResults().size(), is(1)); + assertThat(chatCompletionResults.getResults().get(0).content(), is("result")); } public void testFromResponse_FailsWhenChoicesFieldIsNotPresent() { @@ -74,7 +74,7 @@ public void testFromResponse_FailsWhenChoicesFieldIsNotPresent() { }, "logprobs": null, "finish_reason": "stop" - }, + } ], "usage": { "prompt_tokens": 46, @@ -112,7 +112,7 @@ public void testFromResponse_FailsWhenChoicesFieldNotAnArray() { }, "logprobs": null, "finish_reason": "stop" - }, + } }, "usage": { "prompt_tokens": 46, @@ -153,7 +153,7 @@ public void testFromResponse_FailsWhenMessageDoesNotExist() { }, "logprobs": null, "finish_reason": "stop" - }, + } ], "usage": { "prompt_tokens": 46, diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettingsTests.java index 97fa6efc962bb..d2b83d7b14e2b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettingsTests.java @@ -16,7 +16,6 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; -import org.hamcrest.CoreMatchers; import java.io.IOException; import java.util.HashMap; @@ -119,7 +118,7 @@ public void testToXContext_WritesApiKeyOnlyWhenEntraIdIsNull() throws IOExceptio String xContentResult = Strings.toString(builder); var expectedResult = Strings.format("{\"%s\":\"apikey\"}", API_KEY); - assertThat(xContentResult, CoreMatchers.is(expectedResult)); + assertThat(xContentResult, is(expectedResult)); } public void testToXContext_WritesEntraIdOnlyWhenApiKeyIsNull() throws IOException { @@ -129,7 +128,7 @@ public void testToXContext_WritesEntraIdOnlyWhenApiKeyIsNull() throws IOExceptio String xContentResult = Strings.toString(builder); var expectedResult = Strings.format("{\"%s\":\"entraid\"}", ENTRA_ID); - assertThat(xContentResult, CoreMatchers.is(expectedResult)); + assertThat(xContentResult, is(expectedResult)); } @Override diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModelTests.java new file mode 100644 index 0000000000000..93d948a5bdcf3 --- /dev/null +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModelTests.java @@ -0,0 +1,142 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields; + +import java.net.URISyntaxException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.sameInstance; + +public class AzureOpenAiCompletionModelTests extends ESTestCase { + + public void testOverrideWith_UpdatedTaskSettings_OverridesUser() { + var resource = "resource"; + var deploymentId = "deployment"; + var apiVersion = "api version"; + var apiKey = "api key"; + var entraId = "entra id"; + var inferenceEntityId = "inference entity id"; + + var user = "user"; + var userOverride = "user override"; + + var model = createCompletionModel(resource, deploymentId, apiVersion, user, apiKey, entraId, inferenceEntityId); + var requestTaskSettingsMap = taskSettingsMap(userOverride); + var overriddenModel = AzureOpenAiCompletionModel.of(model, requestTaskSettingsMap); + + assertThat( + overriddenModel, + equalTo(createCompletionModel(resource, deploymentId, apiVersion, userOverride, apiKey, entraId, inferenceEntityId)) + ); + } + + public void testOverrideWith_EmptyMap_OverridesNothing() { + var model 
= createCompletionModel("resource", "deployment", "api version", "user", "api key", "entra id", "inference entity id"); + var requestTaskSettingsMap = Map.of(); + var overriddenModel = AzureOpenAiCompletionModel.of(model, requestTaskSettingsMap); + + assertThat(overriddenModel, sameInstance(model)); + } + + public void testOverrideWith_NullMap_OverridesNothing() { + var model = createCompletionModel("resource", "deployment", "api version", "user", "api key", "entra id", "inference entity id"); + var overriddenModel = AzureOpenAiCompletionModel.of(model, null); + + assertThat(overriddenModel, sameInstance(model)); + } + + public void testOverrideWith_UpdatedServiceSettings_OverridesApiVersion() { + var resource = "resource"; + var deploymentId = "deployment"; + var apiKey = "api key"; + var user = "user"; + var entraId = "entra id"; + var inferenceEntityId = "inference entity id"; + + var apiVersion = "api version"; + var updatedApiVersion = "updated api version"; + + var updatedServiceSettings = new AzureOpenAiCompletionServiceSettings(resource, deploymentId, updatedApiVersion, null); + + var model = createCompletionModel(resource, deploymentId, apiVersion, user, apiKey, entraId, inferenceEntityId); + var overriddenModel = new AzureOpenAiCompletionModel(model, updatedServiceSettings); + + assertThat( + overriddenModel, + is(createCompletionModel(resource, deploymentId, updatedApiVersion, user, apiKey, entraId, inferenceEntityId)) + ); + } + + public void testBuildUriString() throws URISyntaxException { + var resource = "resource"; + var deploymentId = "deployment"; + var apiKey = "api key"; + var user = "user"; + var entraId = "entra id"; + var inferenceEntityId = "inference entity id"; + var apiVersion = "2024"; + + var model = createCompletionModel(resource, deploymentId, apiVersion, user, apiKey, entraId, inferenceEntityId); + + assertThat( + model.buildUriString().toString(), + 
is("https://resource.openai.azure.com/openai/deployments/deployment/chat/completions?api-version=2024") + ); + } + + public static AzureOpenAiCompletionModel createModelWithRandomValues() { + return createCompletionModel( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10) + ); + } + + public static AzureOpenAiCompletionModel createCompletionModel( + String resourceName, + String deploymentId, + String apiVersion, + String user, + @Nullable String apiKey, + @Nullable String entraId, + String inferenceEntityId + ) { + var secureApiKey = apiKey != null ? new SecureString(apiKey.toCharArray()) : null; + var secureEntraId = entraId != null ? new SecureString(entraId.toCharArray()) : null; + + return new AzureOpenAiCompletionModel( + inferenceEntityId, + TaskType.COMPLETION, + "service", + new AzureOpenAiCompletionServiceSettings(resourceName, deploymentId, apiVersion, null), + new AzureOpenAiCompletionTaskSettings(user), + new AzureOpenAiSecretSettings(secureApiKey, secureEntraId) + ); + } + + private Map taskSettingsMap(String user) { + Map taskSettingsMap = new HashMap<>(); + taskSettingsMap.put(AzureOpenAiServiceFields.USER, user); + return taskSettingsMap; + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettingsTests.java new file mode 100644 index 0000000000000..51963c275a08a --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettingsTests.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields; + +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; + +public class AzureOpenAiCompletionRequestTaskSettingsTests extends ESTestCase { + + public void testFromMap_ReturnsEmptySettings_WhenMapIsEmpty() { + var settings = AzureOpenAiCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of())); + assertThat(settings, is(AzureOpenAiCompletionRequestTaskSettings.EMPTY_SETTINGS)); + } + + public void testFromMap_ReturnsEmptySettings_WhenMapDoesNotContainKnownFields() { + var settings = AzureOpenAiCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of("key", "model"))); + assertThat(settings, is(AzureOpenAiCompletionRequestTaskSettings.EMPTY_SETTINGS)); + } + + public void testFromMap_ReturnsUser() { + var settings = AzureOpenAiCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, "user"))); + assertThat(settings.user(), is("user")); + } + + public void testFromMap_WhenUserIsEmpty_ThrowsValidationException() { + var exception = expectThrows( + ValidationException.class, + () -> AzureOpenAiCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, ""))) + ); + + assertThat(exception.getMessage(), containsString("[user] must be a non-empty string")); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettingsTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettingsTests.java new file mode 100644 index 0000000000000..cbaa41c37958d --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettingsTests.java @@ -0,0 +1,92 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.is; + +public class AzureOpenAiCompletionServiceSettingsTests extends AbstractWireSerializingTestCase { + + private static AzureOpenAiCompletionServiceSettings createRandom() { + var resourceName = randomAlphaOfLength(8); + var deploymentId = randomAlphaOfLength(8); + var apiVersion = randomAlphaOfLength(8); + + return new AzureOpenAiCompletionServiceSettings(resourceName, deploymentId, apiVersion, null); + } + + public void testFromMap_Request_CreatesSettingsCorrectly() { + var resourceName = "this-resource"; + var deploymentId = "this-deployment"; + var apiVersion = "2024-01-01"; + + var serviceSettings = AzureOpenAiCompletionServiceSettings.fromMap( + new HashMap<>( + Map.of( + AzureOpenAiServiceFields.RESOURCE_NAME, + 
resourceName, + AzureOpenAiServiceFields.DEPLOYMENT_ID, + deploymentId, + AzureOpenAiServiceFields.API_VERSION, + apiVersion + ) + ) + ); + + assertThat(serviceSettings, is(new AzureOpenAiCompletionServiceSettings(resourceName, deploymentId, apiVersion, null))); + } + + public void testToXContent_WritesAllValues() throws IOException { + var entity = new AzureOpenAiCompletionServiceSettings("resource", "deployment", "2024", null); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"resource_name":"resource","deployment_id":"deployment","api_version":"2024","rate_limit":{"requests_per_minute":120}}""")); + } + + public void testToFilteredXContent_WritesAllValues_Except_RateLimit() throws IOException { + var entity = new AzureOpenAiCompletionServiceSettings("resource", "deployment", "2024", null); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + var filteredXContent = entity.getFilteredXContentObject(); + filteredXContent.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"resource_name":"resource","deployment_id":"deployment","api_version":"2024"}""")); + } + + @Override + protected Writeable.Reader instanceReader() { + return AzureOpenAiCompletionServiceSettings::new; + } + + @Override + protected AzureOpenAiCompletionServiceSettings createTestInstance() { + return createRandom(); + } + + @Override + protected AzureOpenAiCompletionServiceSettings mutateInstance(AzureOpenAiCompletionServiceSettings instance) throws IOException { + return createRandom(); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettingsTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettingsTests.java new file mode 100644 index 0000000000000..7f0e730b8835c --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettingsTests.java @@ -0,0 +1,99 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields; +import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsTaskSettings; +import org.hamcrest.MatcherAssert; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.is; + +public class AzureOpenAiCompletionTaskSettingsTests extends AbstractWireSerializingTestCase { + + public static AzureOpenAiCompletionTaskSettings createRandomWithUser() { + return new AzureOpenAiCompletionTaskSettings(randomAlphaOfLength(15)); + } + + public static AzureOpenAiCompletionTaskSettings createRandom() { + var user = randomBoolean() ? 
randomAlphaOfLength(15) : null; + return new AzureOpenAiCompletionTaskSettings(user); + } + + public void testFromMap_WithUser() { + var user = "user"; + + assertThat( + new AzureOpenAiCompletionTaskSettings(user), + is(AzureOpenAiCompletionTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, user)))) + ); + } + + public void testFromMap_UserIsEmptyString() { + var thrownException = expectThrows( + ValidationException.class, + () -> AzureOpenAiEmbeddingsTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, ""))) + ); + + MatcherAssert.assertThat( + thrownException.getMessage(), + is(Strings.format("Validation Failed: 1: [task_settings] Invalid value empty string. [user] must be a non-empty string;")) + ); + } + + public void testFromMap_MissingUser_DoesNotThrowException() { + var taskSettings = AzureOpenAiCompletionTaskSettings.fromMap(new HashMap<>(Map.of())); + assertNull(taskSettings.user()); + } + + public void testOverrideWith_KeepsOriginalValuesWithOverridesAreNull() { + var taskSettings = AzureOpenAiCompletionTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, "user"))); + + var overriddenTaskSettings = AzureOpenAiCompletionTaskSettings.of( + taskSettings, + AzureOpenAiCompletionRequestTaskSettings.EMPTY_SETTINGS + ); + assertThat(overriddenTaskSettings, is(taskSettings)); + } + + public void testOverrideWith_UsesOverriddenSettings() { + var user = "user"; + var userOverride = "user override"; + + var taskSettings = AzureOpenAiCompletionTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, user))); + + var requestTaskSettings = AzureOpenAiCompletionRequestTaskSettings.fromMap( + new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, userOverride)) + ); + + var overriddenTaskSettings = AzureOpenAiCompletionTaskSettings.of(taskSettings, requestTaskSettings); + assertThat(overriddenTaskSettings, is(new AzureOpenAiCompletionTaskSettings(userOverride))); + } + + @Override + protected 
Writeable.Reader instanceReader() { + return AzureOpenAiCompletionTaskSettings::new; + } + + @Override + protected AzureOpenAiCompletionTaskSettings createTestInstance() { + return createRandomWithUser(); + } + + @Override + protected AzureOpenAiCompletionTaskSettings mutateInstance(AzureOpenAiCompletionTaskSettings instance) throws IOException { + return createRandomWithUser(); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModelTests.java index aebc2240983f7..1747155623a98 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModelTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; +import java.net.URISyntaxException; import java.util.Map; import static org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsTaskSettingsTests.getAzureOpenAiRequestTaskSettingsMap; @@ -65,6 +66,35 @@ public void testCreateModel_FromUpdatedServiceSettings() { assertThat(overridenModel, is(createModel("resource", "deployment", "override_apiversion", "user", "api_key", null, "id"))); } + public void testBuildUriString() throws URISyntaxException { + var resource = "resource"; + var deploymentId = "deployment"; + var apiKey = "api key"; + var user = "user"; + var entraId = "entra id"; + var inferenceEntityId = "inference entity id"; + var apiVersion = "2024"; + + var model = createModel(resource, deploymentId, apiVersion, user, apiKey, entraId, inferenceEntityId); + + assertThat( + 
model.buildUriString().toString(), + is("https://resource.openai.azure.com/openai/deployments/deployment/embeddings?api-version=2024") + ); + } + + public static AzureOpenAiEmbeddingsModel createModelWithRandomValues() { + return createModel( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10) + ); + } + public static AzureOpenAiEmbeddingsModel createModel( String resourceName, String deploymentId, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTaskSettingsTests.java index 3ff73e0f23656..0aef2a97ee0a1 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTaskSettingsTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields; -import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsRequestTaskSettings; import java.util.HashMap; import java.util.Map; @@ -21,30 +20,30 @@ public class AzureOpenAiEmbeddingsRequestTaskSettingsTests extends ESTestCase { public void testFromMap_ReturnsEmptySettings_WhenTheMapIsEmpty() { - var settings = OpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of())); - assertThat(settings, is(OpenAiEmbeddingsRequestTaskSettings.EMPTY_SETTINGS)); + var settings = AzureOpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of())); + 
assertThat(settings, is(AzureOpenAiEmbeddingsRequestTaskSettings.EMPTY_SETTINGS)); } public void testFromMap_ReturnsEmptySettings_WhenTheMapDoesNotContainTheFields() { - var settings = OpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of("key", "model"))); + var settings = AzureOpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of("key", "model"))); assertNull(settings.user()); } public void testFromMap_ReturnsUser() { - var settings = OpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceFields.USER, "user"))); + var settings = AzureOpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceFields.USER, "user"))); assertThat(settings.user(), is("user")); } public void testFromMap_WhenUserIsEmpty_ThrowsValidationException() { var exception = expectThrows( ValidationException.class, - () -> OpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceFields.USER, ""))) + () -> AzureOpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceFields.USER, ""))) ); assertThat(exception.getMessage(), containsString("[user] must be a non-empty string")); } - public static Map getRequestTaskSettingsMap(@Nullable String user) { + public static Map createRequestTaskSettingsMap(@Nullable String user) { var map = new HashMap(); if (user != null) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettingsTests.java index 79bd28fd8b600..7c56ffad27c80 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettingsTests.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettingsTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettingsTests; -import org.hamcrest.CoreMatchers; import org.hamcrest.MatcherAssert; import java.io.IOException; @@ -364,7 +363,7 @@ public void testToXContent_WritesDimensionsSetByUserTrue() throws IOException { entity.toXContent(builder, null); String xContentResult = Strings.toString(builder); - assertThat(xContentResult, CoreMatchers.is(""" + assertThat(xContentResult, is(""" {"resource_name":"resource","deployment_id":"deployment","api_version":"apiVersion",""" + """ "rate_limit":{"requests_per_minute":2},"dimensions_set_by_user":true}""")); } @@ -385,12 +384,12 @@ public void testToXContent_WritesAllValues() throws IOException { entity.toXContent(builder, null); String xContentResult = Strings.toString(builder); - assertThat(xContentResult, CoreMatchers.is(""" + assertThat(xContentResult, is(""" {"resource_name":"resource","deployment_id":"deployment","api_version":"apiVersion",""" + """ "dimensions":1024,"max_input_tokens":512,"rate_limit":{"requests_per_minute":3},"dimensions_set_by_user":false}""")); } - public void testToFilteredXContent_WritesAllValues_ExceptDimensionsSetByUser() throws IOException { + public void testToFilteredXContent_WritesAllValues_Except_DimensionsSetByUser_RateLimit() throws IOException { var entity = new AzureOpenAiEmbeddingsServiceSettings( "resource", "deployment", @@ -407,9 +406,9 @@ public void testToFilteredXContent_WritesAllValues_ExceptDimensionsSetByUser() t filteredXContent.toXContent(builder, null); String xContentResult = Strings.toString(builder); - assertThat(xContentResult, CoreMatchers.is(""" + 
assertThat(xContentResult, is(""" {"resource_name":"resource","deployment_id":"deployment","api_version":"apiVersion",""" + """ - "dimensions":1024,"max_input_tokens":512,"rate_limit":{"requests_per_minute":1}}""")); + "dimensions":1024,"max_input_tokens":512}""")); } @Override diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettingsTests.java index cb224f4089c0a..a010f63802052 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettingsTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettingsTests; -import org.hamcrest.CoreMatchers; import org.hamcrest.MatcherAssert; import java.io.IOException; @@ -260,7 +259,7 @@ public void testXContent_WritesModelId() throws IOException { entity.toXContent(builder, null); String xContentResult = Strings.toString(builder); - assertThat(xContentResult, CoreMatchers.is(""" + assertThat(xContentResult, is(""" {"model_id":"modelId","rate_limit":{"requests_per_minute":1}}""")); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java index 24edb9bfe87f0..1ac97642f0b85 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java @@ -16,6 +16,9 @@ import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; import org.elasticsearch.xpack.inference.InferenceNamedWriteablesProvider; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; @@ -23,6 +26,7 @@ import org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettings; import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettingsTests; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import org.hamcrest.MatcherAssert; import java.io.IOException; @@ -314,6 +318,35 @@ public void testFromCohereOrDenseVectorEnumValues() { assertTrue(validation.validationErrors().isEmpty()); } + public void testToXContent_WritesAllValues() throws IOException { + var serviceSettings = new CohereEmbeddingsServiceSettings( + new CohereServiceSettings("url", SimilarityMeasure.COSINE, 5, 10, "model_id", new RateLimitSettings(3)), + CohereEmbeddingType.INT8 + ); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + serviceSettings.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + assertThat(xContentResult, is(""" + {"url":"url","similarity":"cosine","dimensions":5,"max_input_tokens":10,"model_id":"model_id",""" + """ + "rate_limit":{"requests_per_minute":3},"embedding_type":"byte"}""")); + } + + public void testToXContent_WritesAllValues_Except_RateLimit() throws 
IOException { + var serviceSettings = new CohereEmbeddingsServiceSettings( + new CohereServiceSettings("url", SimilarityMeasure.COSINE, 5, 10, "model_id", new RateLimitSettings(3)), + CohereEmbeddingType.INT8 + ); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + var filteredXContent = serviceSettings.getFilteredXContentObject(); + filteredXContent.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + assertThat(xContentResult, is(""" + {"url":"url","similarity":"cosine","dimensions":5,"max_input_tokens":10,"model_id":"model_id",""" + """ + "embedding_type":"byte"}""")); + } + @Override protected Writeable.Reader instanceReader() { return CohereEmbeddingsServiceSettings::new; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettingsTests.java index 64af547171af2..4f5d872f09eb8 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettingsTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.inference.services.cohere.CohereServiceFields; import org.elasticsearch.xpack.inference.services.cohere.CohereTruncation; -import org.hamcrest.CoreMatchers; import org.hamcrest.MatcherAssert; import java.io.IOException; @@ -110,7 +109,7 @@ private static > String getValidValuesSortedAndCombined(EnumSe public void testXContent_ThrowsAssertionFailure_WhenInputTypeIsUnspecified() { var thrownException = expectThrows(AssertionError.class, () -> new CohereEmbeddingsTaskSettings(InputType.UNSPECIFIED, null)); - 
MatcherAssert.assertThat(thrownException.getMessage(), CoreMatchers.is("received invalid input type value [unspecified]")); + MatcherAssert.assertThat(thrownException.getMessage(), is("received invalid input type value [unspecified]")); } public void testOf_KeepsOriginalValuesWhenRequestSettingsAreNull_AndRequestInputTypeIsInvalid() { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettingsTests.java new file mode 100644 index 0000000000000..cb30077fec174 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettingsTests.java @@ -0,0 +1,94 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.cohere.rerank; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.SimilarityMeasure; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; +import org.elasticsearch.xpack.inference.InferenceNamedWriteablesProvider; +import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettings; +import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettingsTests; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.is; + +public class CohereRerankServiceSettingsTests extends AbstractWireSerializingTestCase { + public static CohereRerankServiceSettings createRandom() { + var commonSettings = CohereServiceSettingsTests.createRandom(); + + return new CohereRerankServiceSettings(commonSettings); + } + + public void testToXContent_WritesAllValues() throws IOException { + var serviceSettings = new CohereRerankServiceSettings( + new CohereServiceSettings("url", SimilarityMeasure.COSINE, 5, 10, "model_id", new RateLimitSettings(3)) + ); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + serviceSettings.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + // TODO we probably shouldn't allow configuring these fields for reranking + assertThat(xContentResult, is(""" + 
{"url":"url","similarity":"cosine","dimensions":5,"max_input_tokens":10,"model_id":"model_id",""" + """ + "rate_limit":{"requests_per_minute":3}}""")); + } + + public void testToXContent_WritesAllValues_Except_RateLimit() throws IOException { + var serviceSettings = new CohereRerankServiceSettings( + new CohereServiceSettings("url", SimilarityMeasure.COSINE, 5, 10, "model_id", new RateLimitSettings(3)) + ); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + var filteredXContent = serviceSettings.getFilteredXContentObject(); + filteredXContent.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + // TODO we probably shouldn't allow configuring these fields for reranking + assertThat(xContentResult, is(""" + {"url":"url","similarity":"cosine","dimensions":5,"max_input_tokens":10,"model_id":"model_id"}""")); + } + + @Override + protected Writeable.Reader instanceReader() { + return CohereRerankServiceSettings::new; + } + + @Override + protected CohereRerankServiceSettings createTestInstance() { + return createRandom(); + } + + @Override + protected CohereRerankServiceSettings mutateInstance(CohereRerankServiceSettings instance) throws IOException { + return null; + } + + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + List entries = new ArrayList<>(); + entries.addAll(new MlInferenceNamedXContentProvider().getNamedWriteables()); + entries.addAll(InferenceNamedWriteablesProvider.getNamedWriteables()); + return new NamedWriteableRegistry(entries); + } + + public static Map getServiceSettingsMap(@Nullable String url, @Nullable String model) { + return new HashMap<>(CohereServiceSettingsTests.getServiceSettingsMap(url, model)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java index 8ebf5b1dfd615..d81c94a0dedda 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java @@ -173,6 +173,18 @@ public void testToXContent_WritesAllValues() throws IOException { {"url":"url","rate_limit":{"requests_per_minute":3}}""")); } + public void testToXContent_WritesAllValues_Except_RateLimit() throws IOException { + var serviceSettings = new HuggingFaceServiceSettings(ServiceUtils.createUri("url"), null, null, null, new RateLimitSettings(3)); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + var filteredXContent = serviceSettings.getFilteredXContentObject(); + filteredXContent.toXContent(builder, null); + String xContentResult = org.elasticsearch.common.Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"url":"url"}""")); + } + @Override protected Writeable.Reader instanceReader() { return HuggingFaceServiceSettings::new; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettingsTests.java index 525f701323511..eadefddecce70 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettingsTests.java @@ -11,6 +11,11 @@ import org.elasticsearch.common.ValidationException; import 
org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.services.ServiceUtils; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import java.io.IOException; import java.util.HashMap; @@ -82,6 +87,29 @@ public void testFromMap_InvalidUrl_ThrowsError() { ); } + public void testToXContent_WritesAllValues() throws IOException { + var serviceSettings = new HuggingFaceElserServiceSettings(ServiceUtils.createUri("url"), new RateLimitSettings(3)); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + serviceSettings.toXContent(builder, null); + String xContentResult = org.elasticsearch.common.Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"url":"url","max_input_tokens":512,"rate_limit":{"requests_per_minute":3}}""")); + } + + public void testToXContent_WritesAllValues_Except_RateLimit() throws IOException { + var serviceSettings = new HuggingFaceElserServiceSettings(ServiceUtils.createUri("url"), new RateLimitSettings(3)); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + var filteredXContent = serviceSettings.getFilteredXContentObject(); + filteredXContent.toXContent(builder, null); + String xContentResult = org.elasticsearch.common.Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"url":"url","max_input_tokens":512}""")); + } + @Override protected Writeable.Reader instanceReader() { return HuggingFaceElserServiceSettings::new; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java index 5531f1c14ddff..b9b4310699d07 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java @@ -211,6 +211,19 @@ public void testToXContent_DoesNotWriteOptionalValues() throws IOException { {"model_id":"model","rate_limit":{"requests_per_minute":500}}""")); } + public void testToXContent_WritesAllValues_Except_RateLimit() throws IOException { + var serviceSettings = new OpenAiChatCompletionServiceSettings("model", "url", "org", 1024, new RateLimitSettings(2)); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + var filteredXContent = serviceSettings.getFilteredXContentObject(); + filteredXContent.toXContent(builder, null); + String xContentResult = org.elasticsearch.common.Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"model_id":"model","url":"url","organization_id":"org",""" + """ + "max_input_tokens":1024}""")); + } + @Override protected Writeable.Reader instanceReader() { return OpenAiChatCompletionServiceSettings::new; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java index b978e2563ece7..86b7f4421954d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java @@ -16,7 +16,7 @@ 
import java.util.Map; -import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsRequestTaskSettingsTests.getRequestTaskSettingsMap; +import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsRequestTaskSettingsTests.createRequestTaskSettingsMap; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.sameInstance; @@ -24,7 +24,7 @@ public class OpenAiEmbeddingsModelTests extends ESTestCase { public void testOverrideWith_OverridesUser() { var model = createModel("url", "org", "api_key", "model_name", null); - var requestTaskSettingsMap = getRequestTaskSettingsMap("user_override"); + var requestTaskSettingsMap = createRequestTaskSettingsMap("user_override"); var overriddenModel = OpenAiEmbeddingsModel.of(model, requestTaskSettingsMap); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettingsTests.java index c95853e2d0128..6892e92d936e5 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettingsTests.java @@ -32,7 +32,7 @@ public void testFromMap_ReturnsUser() { assertThat(settings.user(), is("user")); } - public static Map getRequestTaskSettingsMap(@Nullable String user) { + public static Map createRequestTaskSettingsMap(@Nullable String user) { var map = new HashMap(); if (user != null) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java index 92fb00a4061e2..c964d2643459d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java @@ -22,7 +22,6 @@ import org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettingsTests; -import org.hamcrest.CoreMatchers; import java.io.IOException; import java.net.URI; @@ -366,7 +365,7 @@ public void testToXContent_WritesDimensionsSetByUserTrue() throws IOException { entity.toXContent(builder, null); String xContentResult = Strings.toString(builder); - assertThat(xContentResult, CoreMatchers.is(""" + assertThat(xContentResult, is(""" {"model_id":"model","url":"url","organization_id":"org",""" + """ "rate_limit":{"requests_per_minute":3000},"dimensions_set_by_user":true}""")); } @@ -378,7 +377,7 @@ public void testToXContent_WritesDimensionsSetByUserFalse() throws IOException { entity.toXContent(builder, null); String xContentResult = Strings.toString(builder); - assertThat(xContentResult, CoreMatchers.is(""" + assertThat(xContentResult, is(""" {"model_id":"model","url":"url","organization_id":"org",""" + """ "rate_limit":{"requests_per_minute":3000},"dimensions_set_by_user":false}""")); } @@ -390,7 +389,7 @@ public void testToXContent_WritesAllValues() throws IOException { entity.toXContent(builder, null); String xContentResult = Strings.toString(builder); - assertThat(xContentResult, CoreMatchers.is(""" + assertThat(xContentResult, is(""" {"model_id":"model","url":"url","organization_id":"org","similarity":"dot_product",""" 
+ """ "dimensions":1,"max_input_tokens":2,"rate_limit":{"requests_per_minute":3000},"dimensions_set_by_user":false}""")); } @@ -403,9 +402,9 @@ public void testToFilteredXContent_WritesAllValues_ExceptDimensionsSetByUser() t filteredXContent.toXContent(builder, null); String xContentResult = Strings.toString(builder); - assertThat(xContentResult, CoreMatchers.is(""" + assertThat(xContentResult, is(""" {"model_id":"model","url":"url","organization_id":"org","similarity":"dot_product",""" + """ - "dimensions":1,"max_input_tokens":2,"rate_limit":{"requests_per_minute":3000}}""")); + "dimensions":1,"max_input_tokens":2}""")); } public void testToFilteredXContent_WritesAllValues_WithSpecifiedRateLimit() throws IOException { @@ -425,9 +424,9 @@ public void testToFilteredXContent_WritesAllValues_WithSpecifiedRateLimit() thro filteredXContent.toXContent(builder, null); String xContentResult = Strings.toString(builder); - assertThat(xContentResult, CoreMatchers.is(""" + assertThat(xContentResult, is(""" {"model_id":"model","url":"url","organization_id":"org","similarity":"dot_product",""" + """ - "dimensions":1,"max_input_tokens":2,"rate_limit":{"requests_per_minute":2000}}""")); + "dimensions":1,"max_input_tokens":2}""")); } @Override diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml index f6ea61ee2cee4..067b79aec1fdd 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml @@ -144,7 +144,6 @@ setup: - match: { hits.total.value: 2 } - match: { hits.total.relation: eq } - match: { hits.hits.0.inner_hits.sparse_field\.inference\.chunks.hits.total.value: 2 } - - match: { 
hits.hits.0.inner_hits.sparse_field\.inference\.chunks.hits.total.value: 2 } - match: { hits.hits.0.inner_hits.sparse_field\.inference\.chunks.hits.total.relation: eq } - length: { hits.hits.0.inner_hits.sparse_field\.inference\.chunks.hits.hits.0.fields.sparse_field\.inference\.chunks.0.text: 1 } @@ -208,7 +207,6 @@ setup: - match: { hits.total.value: 2 } - match: { hits.total.relation: eq } - match: { hits.hits.0.inner_hits.dense_field\.inference\.chunks.hits.total.value: 2 } - - match: { hits.hits.0.inner_hits.dense_field\.inference\.chunks.hits.total.value: 2 } - match: { hits.hits.0.inner_hits.dense_field\.inference\.chunks.hits.total.relation: eq } - length: { hits.hits.0.inner_hits.dense_field\.inference\.chunks.hits.hits.0.fields.dense_field\.inference\.chunks.0.text: 1 } @@ -457,7 +455,7 @@ setup: properties: sparse_field: type: semantic_text - inference_id: dense-inference-id + inference_id: sparse-inference-id source_field: type: text copy_to: sparse_field @@ -499,7 +497,7 @@ setup: properties: sparse_field: type: semantic_text - inference_id: dense-inference-id + inference_id: sparse-inference-id source_field: type: text copy_to: sparse_field @@ -772,3 +770,4 @@ setup: - exists: _source.sparse_field.inference.chunks.0.embeddings - match: { _source.dense_field.text: "another updated inference test" } - match: { _source.dense_field.inference.chunks.0.text: "another updated inference test" } + - exists: _source.dense_field.inference.chunks.0.embeddings diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle index 26f5ea053771c..f42dcc6179d04 100644 --- a/x-pack/plugin/ml/build.gradle +++ b/x-pack/plugin/ml/build.gradle @@ -1,6 +1,5 @@ import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.internal.dra.DraResolvePlugin -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' @@ -115,12 +114,6 @@ artifacts { 
archives tasks.named("jar") } -if (BuildParams.isSnapshotBuild() == false) { - tasks.named("test").configure { - systemProperty 'es.semantic_text_feature_flag_enabled', 'true' - } -} - tasks.register("extractNativeLicenses", Copy) { dependsOn configurations.nativeBundle into "${buildDir}/extractedNativeLicenses" diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java index a2b00974d4038..33fd7c108863b 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java @@ -144,7 +144,6 @@ public void testLoseDedicatedMasterNode() throws Exception { }); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104081") public void testFullClusterRestart() throws Exception { internalCluster().ensureAtLeastNumDataNodes(3); ensureStableCluster(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetCollector.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetCollector.java index 18086748d6fe0..bd80e362f2f71 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetCollector.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetCollector.java @@ -177,7 +177,8 @@ FrequentItemSet toFrequentItemSet(List fields) throws IOException { int pos = items.nextSetBit(0); while (pos > 0) { Tuple item = transactionStore.getItem(topItemIds.getItemIdAt(pos - 1)); - assert item.v1() < fields.size() : "item id exceed number of given items, did you configure eclat correctly?"; + assert item.v1() < fields.size() + : 
"eclat error: item id (" + item.v1() + ") exceeds the number of given items (" + fields.size() + ")"; final Field field = fields.get(item.v1()); Object formattedValue = field.formatValue(item.v2()); String fieldName = fields.get(item.v1()).getName(); @@ -252,19 +253,20 @@ public FrequentItemSetCollector(TransactionStore transactionStore, TopItemIds to this.topItemIds = topItemIds; this.size = size; this.min = min; - queue = new FrequentItemSetPriorityQueue(size); - frequentItemsByCount = Maps.newMapWithExpectedSize(size / 10); + this.queue = new FrequentItemSetPriorityQueue(size); + this.frequentItemsByCount = Maps.newMapWithExpectedSize(size / 10); } public FrequentItemSet[] finalizeAndGetResults(List fields) throws IOException { - FrequentItemSet[] topFrequentItems = new FrequentItemSet[size()]; + FrequentItemSet[] topFrequentItems = new FrequentItemSet[queue.size()]; for (int i = topFrequentItems.length - 1; i >= 0; i--) { topFrequentItems[i] = queue.pop().toFrequentItemSet(fields); } return topFrequentItems; } - public int size() { + // Visible for testing + int size() { return queue.size(); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceAggregator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceAggregator.java index 72bfb6f1f0394..0f9555c77341f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceAggregator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceAggregator.java @@ -86,17 +86,15 @@ protected ItemSetMapReduceAggregator( boolean rewriteBasedOnOrdinals = false; - if (ctx.isPresent()) { - for (var c : configsAndValueFilters) { - ItemSetMapReduceValueSource e = context.getValuesSourceRegistry() - .getAggregator(registryKey, c.v1()) - .build(c.v1(), id++, c.v2(), ordinalOptimization, ctx.get()); - if 
(e.getField().getName() != null) { - fields.add(e.getField()); - valueSources.add(e); - } - rewriteBasedOnOrdinals |= e.usesOrdinals(); + for (var c : configsAndValueFilters) { + ItemSetMapReduceValueSource e = context.getValuesSourceRegistry() + .getAggregator(registryKey, c.v1()) + .build(c.v1(), id++, c.v2(), ordinalOptimization, ctx); + if (e.getField().getName() != null) { + fields.add(e.getField()); + valueSources.add(e); } + rewriteBasedOnOrdinals |= e.usesOrdinals(); } this.rewriteBasedOnOrdinals = rewriteBasedOnOrdinals; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceValueSource.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceValueSource.java index c9ec772eb3321..08adecd3fbce5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceValueSource.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceValueSource.java @@ -37,6 +37,7 @@ import java.util.Collections; import java.util.List; import java.util.Objects; +import java.util.Optional; /** * Interface to extract values from Lucene in order to feed it into the MapReducer. 
@@ -53,7 +54,7 @@ ItemSetMapReduceValueSource build( int id, IncludeExclude includeExclude, AbstractItemSetMapReducer.OrdinalOptimization ordinalOptimization, - LeafReaderContext ctx + Optional ctx ) throws IOException; } @@ -345,20 +346,21 @@ public KeywordValueSource( int id, IncludeExclude includeExclude, AbstractItemSetMapReducer.OrdinalOptimization ordinalOptimization, - LeafReaderContext ctx + Optional ctx ) throws IOException { super(config, id, ValueFormatter.BYTES_REF); if (AbstractItemSetMapReducer.OrdinalOptimization.GLOBAL_ORDINALS.equals(ordinalOptimization) && config.getValuesSource() instanceof Bytes.WithOrdinals - && ((Bytes.WithOrdinals) config.getValuesSource()).supportsGlobalOrdinalsMapping()) { + && ((Bytes.WithOrdinals) config.getValuesSource()).supportsGlobalOrdinalsMapping() + && ctx.isPresent()) { logger.debug("Use ordinals for field [{}]", config.fieldContext().field()); this.executionStrategy = new GlobalOrdinalsStrategy( getField(), (Bytes.WithOrdinals) config.getValuesSource(), includeExclude == null ? 
null : includeExclude.convertToOrdinalsFilter(config.format()), - ctx + ctx.get() ); } else { this.executionStrategy = new MapStrategy( @@ -394,7 +396,7 @@ public NumericValueSource( int id, IncludeExclude includeExclude, AbstractItemSetMapReducer.OrdinalOptimization unusedOrdinalOptimization, - LeafReaderContext unusedCtx + Optional unusedCtx ) { super(config, id, ValueFormatter.LONG); this.source = (Numeric) config.getValuesSource(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java index 93dc8077196d7..bbe5bea691c35 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java @@ -190,9 +190,20 @@ Reader normalize(CharSequence str) { BytesRef subStr = maybeSubStr.get(); int numChars = UnicodeUtil.UTF8toUTF16(subStr.bytes, subStr.offset, subStr.length, reusableCharDecodeBuffer); normalizedCharPos += numChars; - if (numChars != end - startIter) { - addOffCorrectMap(normalizedCharPos, getLastCumulativeDiff() + end - startIter - numChars); + int charDelta = numChars - (end - startIter); // output length - input length + if (charDelta < 0) { + // normalised form is shorter + int lastDiff = getLastCumulativeDiff(); + addOffCorrectMap(normalizedCharPos, lastDiff + charDelta); + } else if (charDelta > 0) { + // inserted chars, add the offset in the output stream + int lastDiff = getLastCumulativeDiff(); + int startOffset = normalizedCharPos - charDelta; + for (int i = 1; i <= charDelta; i++) { + addOffCorrectMap(startOffset + i, lastDiff - i); + } } + strBuilder.append(reusableCharDecodeBuffer, 0, numChars); bytePos += byteLen; continue; diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilder.java index f6fa7ca9005c5..571d9b89a32df 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilder.java @@ -101,7 +101,7 @@ public TextExpansionQueryBuilder(StreamInput in) throws IOException { this.fieldName = in.readString(); this.modelText = in.readString(); this.modelId = in.readString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.TEXT_EXPANSION_TOKEN_PRUNING_CONFIG_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { this.tokenPruningConfig = in.readOptionalWriteable(TokenPruningConfig::new); } else { this.tokenPruningConfig = null; @@ -144,7 +144,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeString(fieldName); out.writeString(modelText); out.writeString(modelId); - if (out.getTransportVersion().onOrAfter(TransportVersions.TEXT_EXPANSION_TOKEN_PRUNING_CONFIG_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeOptionalWriteable(tokenPruningConfig); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/WeightedTokensQueryBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/WeightedTokensQueryBuilder.java index 51139881fc2e4..1e96cb293be3b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/WeightedTokensQueryBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/WeightedTokensQueryBuilder.java @@ -221,7 +221,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.TEXT_EXPANSION_TOKEN_PRUNING_CONFIG_ADDED; + return 
TransportVersions.V_8_13_0; } private static float parseWeight(String token, Object weight) throws IOException { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndexTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndexTests.java index 2f3ccaa313b0d..f0f7dec448d99 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndexTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndexTests.java @@ -61,6 +61,7 @@ import java.util.Map; import static java.util.Collections.singletonMap; +import static org.elasticsearch.action.support.ActionTestUtils.assertNoSuccessListener; import static org.elasticsearch.common.xcontent.support.XContentMapValues.extractValue; import static org.elasticsearch.xpack.ml.DefaultMachineLearningExtension.ANALYTICS_DEST_INDEX_ALLOWED_SETTINGS; import static org.hamcrest.Matchers.arrayContaining; @@ -334,10 +335,7 @@ private Map testCreateDestinationIndex(DataFrameAnalysis analysi clock, config, ANALYTICS_DEST_INDEX_ALLOWED_SETTINGS, - ActionListener.wrap( - response -> fail("should not succeed"), - e -> assertThat(e.getMessage(), Matchers.matchesRegex(finalErrorMessage)) - ) + assertNoSuccessListener(e -> assertThat(e.getMessage(), Matchers.matchesRegex(finalErrorMessage))) ); return null; @@ -578,8 +576,7 @@ public void testCreateDestinationIndex_ResultsFieldsExistsInSourceIndex() { clock, config, ANALYTICS_DEST_INDEX_ALLOWED_SETTINGS, - ActionListener.wrap( - response -> fail("should not succeed"), + assertNoSuccessListener( e -> assertThat( e.getMessage(), equalTo("A field that matches the dest.results_field [ml] already exists; please set a different results_field") diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizerTests.java 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizerTests.java index eef9902d35e59..20b68b2b6e750 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizerTests.java @@ -62,6 +62,28 @@ public void testCharThatNormalizesToLongText() throws IOException { assertNormalization("ﷺ", parsed, "صلى الله عليه وسلم"); } + public void testOutOfBounds() throws IOException { + @SuppressWarnings("checkstyle:linelength") + String[] inputs = new String[] { + "ﷺ", + "Građevne strukture Mesa Verde dokaz su akumuliranog znanja i vještina koje su se stoljećima prenosile generacijama civilizacije Anasazi. Vrhunce svojih dosega ostvarili su u 12. i 13. stoljeću, kada su sagrađene danas najpoznatije građevine na liticama. Zidali su obrađenim pješčenjakom, tvrđim kamenom oblikovanim do veličine štruce kruha. Kao žbuku između ciglā stavljali su glinu razmočenu vodom. Tim su materijalom gradili prostorije veličine do 6 četvornih metara. U potkrovljima su skladištili žitarice i druge plodine, dok su kive - ceremonijalne prostorije - gradili ispred soba, ali ukopane u zemlju, nešto poput današnjih podruma. Kiva je bila vrhunski dizajnirana prostorija okruglog oblika s prostorom za vatru zimi te s dovodom hladnog zraka za klimatizaciju ljeti. U zidane konstrukcije stavljali su i lokalno posječena stabla, što današnjim arheolozima pomaže u preciznom datiranju nastanka pojedine građevine metodom dendrokronologije. Ta stabla pridonose i teoriji o mogućem konačnom slomu ondašnjeg društva. Nakon što su, tijekom nekoliko stoljeća, šume do kraja srušene, a njihova obnova zbog sušne klime traje i po 200 godina, nije proteklo puno vremena do konačnog urušavanja civilizacije, koja se, na svojem vrhuncu osjećala nepobjedivom. 
90 % sagrađenih naseobina ispod stijena ima do deset prostorija. ⅓ od ukupnog broja sagrađenih kuća ima jednu ili dvije kamene prostorije", + "Histoarysk wie in acre in stik lân dat 40 roeden (oftewol 1 furlong of ⅛ myl of 660 foet) lang wie, en 4 roeden (of 66 foet) breed. Men is fan tinken dat dat likernôch de grûnmjitte wie dy't men mei in jok oksen yn ien dei beploegje koe.", + "創業当初の「太平洋化学工業社」から1959年太平洋化学工業株式会社へ、1987年には太平洋化学㈱に社名を変更。 1990年以降、海外拠点を増やし本格的な国際進出を始動。 創業者がつくりあげた化粧品会社を世界企業へと成長させるべく2002年3月英文社名AMOREPACIFICに改めた。", + "امام محمد بن جرير رح جن جي ولادت باسعادت 224 هجري طبرستان جي شهر آمل ۾ ٿي ، هي اهو دور هو جڏهن سلطنت عباسيه جو عروج هو ۽ سندس سڄي جمار عهد خلافت عباسيه ۾ گذري ، طبرستان هن وقت پڻ سياست ۽ مذهبي حلقن جنهن ۾ معتزلي ، خوارج ، باطني جو گهوارو هو ۽ ابن جرير جي ٻيهر طبرستان ورڻ وقت روافض جو عروج ٿي ويو هو ابن جرير رح جو نالو ، محمد بن جرير بن يزيد بن ڪثير بن غالب الطبري الآملي هو سندس کوڙ سار لقب آهن جنهن ۾ الامام ، المجتهد ، المفسر ، المورخ، المحدث ، الحافظ ، العلامه ، اللغوي ، المقريءَ ۽ سندس اهي سڀئي القاب سندس بزرگيت تي دلالت ڪن ٿيون . سندس ڪنيت (ابن جرير) هئي ۽ طبرستان ۽ آمل ڏينهن نسبت هجڻ ڪري پاڻ الطبري ۽ الآملي سڏرائيندا هئا. ابن جرير رح هڪ آسودي گهراني ۾ اک کولي ، سندس پيءُ هڪ ڏينهن خواب ڏٺائين ته ابن جرير رح نبي ڪريم ﷺ جي ٻنهي هٿن جي وچ ۾ آهن ۽ نبي ڪريمﷺ جي هٿن مبارڪن ۾ پٿريون آهن جنهن کي ابن جرير رح کڻي اڇلائي رهيا آهن ، عالمن کان جڏهن هن جي تعبير پڇا ڪيائين ته انهن چيو ته اوهان جو پٽ وڏو ٿي ڪري دين جي خدمت سرانجام ڏيندو ۽ اهو خواب ابن جرير جو علم حاصل ڪرڻ جو سبب بڻيو. 
ابن جرير رح ستن سالن ۾ قرآن مجيد حفظ ڪيائين اٺن سالم ۾ امامت جهڙو فريضو انجام ڏنائين نون سالن ۾ حديث لکڻ شروع ڪيائين ۽ جڏهن سورهن سالن جا ٿيا ته اماماحمد بن حنبل رح جي زيارت جو شوق ۾ بغداد ڏانهن سفر ڪرڻ شروع ڪيائين ، سندس سڄو خرچ ۽ بار پيءُ کڻدو هو جڏهن سندس والد جو انتقال ٿيو ته ورثي ۾ زمين جو ٽڪڙو مليس جنهن جي آمدني مان ابن جرير رح پنهنجو گذر سفر فرمائيندا هئا .", + "۱۔ ھن شق جي مطابق قادياني گروھ يا لاھوري گروھ جي ڪنھن رڪن کي جيڪو پاڻ کي 'احمدي' يا ڪنھن ٻي نالي سان پڪاري جي لاءِ ممنوع قرار ڏنو ويو آھي تہ ھو (الف) ڳالھائي، لکي يا ڪنھن ٻي طريقي سان ڪنھن خليفي يا آنحضور ﷺ جي ڪنھن صحابي کان علاوہڍه ڪنھن کي امير المومنين يا خليفہ المومنين يا خليفہ المسلمين يا صحابی يا رضي الله عنه چئي۔ (ب) آنحضور ﷺ جي گھروارين کان علاوه ڪنھن کي ام المومنين چئي۔ (ج) آنحضور ﷺ جي خاندان جي اھل بيت کان علاوہڍه ڪنھن کي اھل بيت چئي۔ (د) پنھنجي عبادت گاھ کي مسجد چئي۔", + "سعد بن فضالہ جو شام کے جہاد میں سہیل کے ساتھ تھے بیان کرتے ہیں کہ ایک مرتبہ سہیل نے کہا کہ میں نے رسول اللہ ﷺ سے سنا ہے کہ خدا کی راہ میں ایک گھڑی صرف کرنا گھر کے تمام عمر کے اعمال سے بہتر ہے، اس لیے اب میں شام کا جہاد چھوڑ کر گھر نہ جاؤں گا اور یہیں جان دونگا، اس عہد پر اس سختی سے قائم رہے کہ طاعون عمواس میں بھی نہ ہٹے اور 18ھ میں اسی وبا میں شام کے غربت کدہ میں جان دی۔", + "دعوت اسلام کے آغاز یعنی آنحضرتﷺ کے ارقم کے گھر میں تشریف لانے سے پہلے مشرف باسلام ہوئے،پھر ہجرت کے زمانہ میں مکہ سے مدینہ گئے آنحضرتﷺ نے غربت کی اجنبیت دورکرنے کے لیے ان میں اورابوعبیدہ بن تیہاں میں مواخاۃ کرادی۔", + "ضرار اپنے قبیلہ کے اصحاب ثروت میں تھے، عرب میں سب سے بڑی دولت اونٹ کے گلے تھے، ضرار کے پاس ہزار اونٹوں کا گلہ تھا، اسلام کے جذب وولولے میں تمام مال ودولت چھوڑ کر خالی ہاتھ آستانِ نبوی پر پہنچے قبول اسلام کے بعد آنحضرتﷺ نے بنی صید اوربنی ہذیل کی طرف بھیجا۔", + "(2) اگر زلیخا کو ملامت کرنے والی عورتیں آپ ﷺ کی جبین انور دیکھ پاتیں تو ہاتھوں کے بجائے اپنے دل کاٹنے کو ترجیح دیتیں۔صحیح بخاری میں ہے، حضرت عطاء بن یسار ؓہُنے حضرت عبداللہ بن عمرو ؓسے سیّدِ عالمﷺ کے وہ اوصاف دریافت کئے جو توریت میں مذکور ہیں تو انہوں نے فرمایا : ’’خدا کی قسم! 
حضور سیدُ المرسلینﷺ کے جو اوصاف قرآنِ کریم میں آئے ہیں انہیں میں سے بعض اوصاف توریت میں مذکور ہیں۔ اس کے بعد انہوں نے پڑھنا شروع کیا: اے نبی! ہم نے تمہیں شاہد و مُبَشِّر اور نذیر اور اُمِّیُّوں کا نگہبان بنا کر بھیجا، تم میرے بندے اور میرے رسول ہو، میں نے تمہارا نام متوکل رکھا،نہ بدخلق ہو نہ سخت مزاج، نہ بازاروں میں آواز بلند کرنے والے ہو نہ برائی سے برائی کو دفع کرنے والے بلکہ خطا کاروں کو معاف کرتے ہو اور ان پر احسان فرماتے ہو، اللہ تعالیٰ تمہیں نہ اٹھائے گا جب تک کہ تمہاری برکت سے غیر مستقیم ملت کو اس طرح راست نہ فرمادے کہ لوگ صدق و یقین کے ساتھ ’’ لَآاِلٰہَ اِلَّا اللہُ مُحَمَّدٌ رَّسُوْلُ اللہِ‘‘ پکارنے لگیں اور تمہاری بدولت اندھی آنکھیں بینا اور بہرے کان شنوا (سننے والے) اور پردوں میں لپٹے ہوئے دل کشادہ ہوجائیں۔ اور کعب احبارؓسے سرکارِ رسالت ﷺکی صفات میں توریت شریف کا یہ مضمون بھی منقول ہے کہ’’ اللہ تعالیٰ نے آپ ﷺکی صفت میں فرمایا کہ’’ میں اُنہیں ہر خوبی کے قابل کروں گا، اور ہر خُلقِ کریم عطا فرماؤں گا، اطمینانِ قلب اور وقار کو اُن کا لباس بناؤں گا اور طاعات وا حسان کو ان کا شعار کروں گا۔ تقویٰ کو ان کا ضمیر، حکمت کو ان کا راز، صدق و وفا کو اُن کی طبیعت ،عفوو کرم کو اُن کی عادت ، عدل کو ان کی سیرت، اظہارِ حق کو اُن کی شریعت، ہدایت کو اُن کا امام اور اسلام کو اُن کی ملت بناؤں گا۔ احمد اُن کا نام ہے، مخلوق کو اُن کے صدقے میں گمراہی کے بعد ہدایت اور جہالت کے بعد علم و معرفت اور گمنامی کے بعد رفعت و منزلت عطا کروں گا۔ اُنہیں کی برکت سے قلت کے بعد کثرت اور فقر کے بعد دولت اور تَفَرُّقے کے بعد محبت عنایت کروں گا، اُنہیں کی بدولت مختلف قبائل، غیر مجتمع خواہشوں اور اختلاف رکھنے والے دلوں میں اُلفت پیدا کروں گا اور اُن کی اُمت کو تمام اُمتوں سے بہتر کروں گا۔ ایک اور حدیث میں توریت سے حضور سید المرسلینﷺسے یہ اوصاف منقول ہیں ’’میرے بندے احمد مختار، ان کی جائے ولادت مکہ مکرمہ اور جائے ہجرت مدینہ طیبہ ہے،اُن کی اُمت ہر حال میں اللہ تعالٰی کی کثیر حمد کرنے والی ہے۔ مُنَزَّہٌ عَنْ شَرِیْکٍ فِیْ مَحَاسِنِہٖ", + "بالآخر آنحضرتﷺ کے اس عفو وکرم نے یہ معجزہ دکھایا کہ سہیل حنین کی واپسی کے وقت آپ کے ساتھ ہوگئے اورمقام جعرانہ پہنچ کر خلعتِ اسلام سے سرفراز ہوئے آنحضرت ﷺ نے ازراہ 
مرحمت حنین کے مالِ غنیمت میں سے سو اونٹ عطا فرمائے، گو فتح مکہ کے بعد کے مسلمانوں کا شمار مؤلفۃ القلوب میں ہے، لیکن سہیل اس زمرہ میں اس حیثیت سے ممتاز ہیں کہ اسلام کے بعد ان سے کوئی بات اسلام کے خلاف ظہور پزیر نہیں ہوئی ،حافظ ابن حجرعسقلانی لکھتے ہیں، کان محمودالا سلام من حین اسلم۔", }; + + PrecompiledCharMapNormalizer.Config parsed = loadTestCharMap(); + + for (var s : inputs) { + normalise(s, parsed); + } + } + private void assertNormalization(String input, PrecompiledCharMapNormalizer.Config config, String expected) throws IOException { PrecompiledCharMapNormalizer normalizer = new PrecompiledCharMapNormalizer( config.offsets(), @@ -77,6 +99,21 @@ private void assertNormalization(String input, PrecompiledCharMapNormalizer.Conf } } + private void normalise(String input, PrecompiledCharMapNormalizer.Config config) throws IOException { + PrecompiledCharMapNormalizer normalizer = new PrecompiledCharMapNormalizer( + config.offsets(), + config.utf8str(), + new StringReader(input) + ); + char[] output = new char[64]; + int offset = 0; + int size = 64; + int read = normalizer.read(output, offset, size); + while (read > 0) { + read = normalizer.read(output, offset, size); + } + } + static PrecompiledCharMapNormalizer.Config loadTestCharMap() throws IOException { return PrecompiledCharMapNormalizer.fromBase64EncodedResource( "/org/elasticsearch/xpack/ml/inference.nlp.tokenizers/spm_precompiled_normalizer.txt" diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTestVocab.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTestVocab.java new file mode 100644 index 0000000000000..b0cce14c59114 --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTestVocab.java @@ -0,0 +1,2179 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; + +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.ml.inference.nlp.Vocabulary; + +import java.io.IOException; + +/** + * {@link #loadMultiLingualTestVocab()} loads a vocabulary file containing + * a subset of the XLM RoBERTa vocabulary and scores sufficient to tokenize + * the strings in {@link #MULTILINUGAL_TEXTS}. + * + * {@link #EXPECTED_TOKENS} is the tokenization of {@link #MULTILINUGAL_TEXTS} + * using the vocabulary and scores in the test vocabulary returned by + * {@link #loadMultiLingualTestVocab()}. The expected tokens were produced by + * tokenizing {@link #MULTILINUGAL_TEXTS} with the HuggingFace transformers + * XLMRoBERTa tokenizer and mapping those tokens to the position of the same + * tokens in the test vocab. + */ +public class XLMRobertaTestVocab { + + public static Vocabulary loadMultiLingualTestVocab() throws IOException { + try ( + var parser = JsonXContent.jsonXContent.createParser( + XContentParserConfiguration.EMPTY, + XLMRobertaTokenizer.class.getResourceAsStream( + "/org/elasticsearch/xpack/ml/inference/tokenizers/xlm_roberta_test_vocabulary.json" + ) + ) + ) { + return Vocabulary.PARSER.apply(parser, null); + } + } + + @SuppressWarnings("checkstyle:linelength") + public static String[] MULTILINUGAL_TEXTS = new String[] { + "Građevne strukture Mesa Verde dokaz su akumuliranog znanja i vještina koje su se stoljećima prenosile generacijama civilizacije Anasazi. Vrhunce svojih dosega ostvarili su u 12. i 13. stoljeću, kada su sagrađene danas najpoznatije građevine na liticama. Zidali su obrađenim pješčenjakom, tvrđim kamenom oblikovanim do veličine štruce kruha. Kao žbuku između ciglā stavljali su glinu razmočenu vodom. 
Tim su materijalom gradili prostorije veličine do 6 četvornih metara. U potkrovljima su skladištili žitarice i druge plodine, dok su kive - ceremonijalne prostorije - gradili ispred soba, ali ukopane u zemlju, nešto poput današnjih podruma. Kiva je bila vrhunski dizajnirana prostorija okruglog oblika s prostorom za vatru zimi te s dovodom hladnog zraka za klimatizaciju ljeti. U zidane konstrukcije stavljali su i lokalno posječena stabla, što današnjim arheolozima pomaže u preciznom datiranju nastanka pojedine građevine metodom dendrokronologije. Ta stabla pridonose i teoriji o mogućem konačnom slomu ondašnjeg društva. Nakon što su, tijekom nekoliko stoljeća, šume do kraja srušene, a njihova obnova zbog sušne klime traje i po 200 godina, nije proteklo puno vremena do konačnog urušavanja civilizacije, koja se, na svojem vrhuncu osjećala nepobjedivom. 90 % sagrađenih naseobina ispod stijena ima do deset prostorija. ⅓ od ukupnog broja sagrađenih kuća ima jednu ili dvije kamene prostorije", + "Histoarysk wie in acre in stik lân dat 40 roeden (oftewol 1 furlong of ⅛ myl of 660 foet) lang wie, en 4 roeden (of 66 foet) breed. Men is fan tinken dat dat likernôch de grûnmjitte wie dy't men mei in jok oksen yn ien dei beploegje koe.", + "創業当初の「太平洋化学工業社」から1959年太平洋化学工業株式会社へ、1987年には太平洋化学㈱に社名を変更。 1990年以降、海外拠点を増やし本格的な国際進出を始動。 創業者がつくりあげた化粧品会社を世界企業へと成長させるべく2002年3月英文社名AMOREPACIFICに改めた。", + "امام محمد بن جرير رح جن جي ولادت باسعادت 224 هجري طبرستان جي شهر آمل ۾ ٿي ، هي اهو دور هو جڏهن سلطنت عباسيه جو عروج هو ۽ سندس سڄي جمار عهد خلافت عباسيه ۾ گذري ، طبرستان هن وقت پڻ سياست ۽ مذهبي حلقن جنهن ۾ معتزلي ، خوارج ، باطني جو گهوارو هو ۽ ابن جرير جي ٻيهر طبرستان ورڻ وقت روافض جو عروج ٿي ويو هو ابن جرير رح جو نالو ، محمد بن جرير بن يزيد بن ڪثير بن غالب الطبري الآملي هو سندس کوڙ سار لقب آهن جنهن ۾ الامام ، المجتهد ، المفسر ، المورخ، المحدث ، الحافظ ، العلامه ، اللغوي ، المقريءَ ۽ سندس اهي سڀئي القاب سندس بزرگيت تي دلالت ڪن ٿيون . 
سندس ڪنيت (ابن جرير) هئي ۽ طبرستان ۽ آمل ڏينهن نسبت هجڻ ڪري پاڻ الطبري ۽ الآملي سڏرائيندا هئا. ابن جرير رح هڪ آسودي گهراني ۾ اک کولي ، سندس پيءُ هڪ ڏينهن خواب ڏٺائين ته ابن جرير رح نبي ڪريم ﷺ جي ٻنهي هٿن جي وچ ۾ آهن ۽ نبي ڪريمﷺ جي هٿن مبارڪن ۾ پٿريون آهن جنهن کي ابن جرير رح کڻي اڇلائي رهيا آهن ، عالمن کان جڏهن هن جي تعبير پڇا ڪيائين ته انهن چيو ته اوهان جو پٽ وڏو ٿي ڪري دين جي خدمت سرانجام ڏيندو ۽ اهو خواب ابن جرير جو علم حاصل ڪرڻ جو سبب بڻيو. ابن جرير رح ستن سالن ۾ قرآن مجيد حفظ ڪيائين اٺن سالم ۾ امامت جهڙو فريضو انجام ڏنائين نون سالن ۾ حديث لکڻ شروع ڪيائين ۽ جڏهن سورهن سالن جا ٿيا ته اماماحمد بن حنبل رح جي زيارت جو شوق ۾ بغداد ڏانهن سفر ڪرڻ شروع ڪيائين ، سندس سڄو خرچ ۽ بار پيءُ کڻدو هو جڏهن سندس والد جو انتقال ٿيو ته ورثي ۾ زمين جو ٽڪڙو مليس جنهن جي آمدني مان ابن جرير رح پنهنجو گذر سفر فرمائيندا هئا .", + "۱۔ ھن شق جي مطابق قادياني گروھ يا لاھوري گروھ جي ڪنھن رڪن کي جيڪو پاڻ کي 'احمدي' يا ڪنھن ٻي نالي سان پڪاري جي لاءِ ممنوع قرار ڏنو ويو آھي تہ ھو (الف) ڳالھائي، لکي يا ڪنھن ٻي طريقي سان ڪنھن خليفي يا آنحضور ﷺ جي ڪنھن صحابي کان علاوہڍه ڪنھن کي امير المومنين يا خليفہ المومنين يا خليفہ المسلمين يا صحابی يا رضي الله عنه چئي۔ (ب) آنحضور ﷺ جي گھروارين کان علاوه ڪنھن کي ام المومنين چئي۔ (ج) آنحضور ﷺ جي خاندان جي اھل بيت کان علاوہڍه ڪنھن کي اھل بيت چئي۔ (د) پنھنجي عبادت گاھ کي مسجد چئي۔", + "سعد بن فضالہ جو شام کے جہاد میں سہیل کے ساتھ تھے بیان کرتے ہیں کہ ایک مرتبہ سہیل نے کہا کہ میں نے رسول اللہ ﷺ سے سنا ہے کہ خدا کی راہ میں ایک گھڑی صرف کرنا گھر کے تمام عمر کے اعمال سے بہتر ہے، اس لیے اب میں شام کا جہاد چھوڑ کر گھر نہ جاؤں گا اور یہیں جان دونگا، اس عہد پر اس سختی سے قائم رہے کہ طاعون عمواس میں بھی نہ ہٹے اور 18ھ میں اسی وبا میں شام کے غربت کدہ میں جان دی۔", + "دعوت اسلام کے آغاز یعنی آنحضرتﷺ کے ارقم کے گھر میں تشریف لانے سے پہلے مشرف باسلام ہوئے،پھر ہجرت کے زمانہ میں مکہ سے مدینہ گئے آنحضرتﷺ نے غربت کی اجنبیت دورکرنے کے لیے ان میں اورابوعبیدہ بن تیہاں میں مواخاۃ کرادی۔", + "ضرار اپنے قبیلہ کے اصحاب ثروت میں تھے، عرب میں سب سے بڑی دولت اونٹ کے گلے تھے، ضرار کے پاس ہزار 
اونٹوں کا گلہ تھا، اسلام کے جذب وولولے میں تمام مال ودولت چھوڑ کر خالی ہاتھ آستانِ نبوی پر پہنچے قبول اسلام کے بعد آنحضرتﷺ نے بنی صید اوربنی ہذیل کی طرف بھیجا۔", + "(2) اگر زلیخا کو ملامت کرنے والی عورتیں آپ ﷺ کی جبین انور دیکھ پاتیں تو ہاتھوں کے بجائے اپنے دل کاٹنے کو ترجیح دیتیں۔صحیح بخاری میں ہے، حضرت عطاء بن یسار ؓہُنے حضرت عبداللہ بن عمرو ؓسے سیّدِ عالمﷺ کے وہ اوصاف دریافت کئے جو توریت میں مذکور ہیں تو انہوں نے فرمایا : ’’خدا کی قسم! حضور سیدُ المرسلینﷺ کے جو اوصاف قرآنِ کریم میں آئے ہیں انہیں میں سے بعض اوصاف توریت میں مذکور ہیں۔ اس کے بعد انہوں نے پڑھنا شروع کیا: اے نبی! ہم نے تمہیں شاہد و مُبَشِّر اور نذیر اور اُمِّیُّوں کا نگہبان بنا کر بھیجا، تم میرے بندے اور میرے رسول ہو، میں نے تمہارا نام متوکل رکھا،نہ بدخلق ہو نہ سخت مزاج، نہ بازاروں میں آواز بلند کرنے والے ہو نہ برائی سے برائی کو دفع کرنے والے بلکہ خطا کاروں کو معاف کرتے ہو اور ان پر احسان فرماتے ہو، اللہ تعالیٰ تمہیں نہ اٹھائے گا جب تک کہ تمہاری برکت سے غیر مستقیم ملت کو اس طرح راست نہ فرمادے کہ لوگ صدق و یقین کے ساتھ ’’ لَآاِلٰہَ اِلَّا اللہُ مُحَمَّدٌ رَّسُوْلُ اللہِ‘‘ پکارنے لگیں اور تمہاری بدولت اندھی آنکھیں بینا اور بہرے کان شنوا (سننے والے) اور پردوں میں لپٹے ہوئے دل کشادہ ہوجائیں۔ اور کعب احبارؓسے سرکارِ رسالت ﷺکی صفات میں توریت شریف کا یہ مضمون بھی منقول ہے کہ’’ اللہ تعالیٰ نے آپ ﷺکی صفت میں فرمایا کہ’’ میں اُنہیں ہر خوبی کے قابل کروں گا، اور ہر خُلقِ کریم عطا فرماؤں گا، اطمینانِ قلب اور وقار کو اُن کا لباس بناؤں گا اور طاعات وا حسان کو ان کا شعار کروں گا۔ تقویٰ کو ان کا ضمیر، حکمت کو ان کا راز، صدق و وفا کو اُن کی طبیعت ،عفوو کرم کو اُن کی عادت ، عدل کو ان کی سیرت، اظہارِ حق کو اُن کی شریعت، ہدایت کو اُن کا امام اور اسلام کو اُن کی ملت بناؤں گا۔ احمد اُن کا نام ہے، مخلوق کو اُن کے صدقے میں گمراہی کے بعد ہدایت اور جہالت کے بعد علم و معرفت اور گمنامی کے بعد رفعت و منزلت عطا کروں گا۔ اُنہیں کی برکت سے قلت کے بعد کثرت اور فقر کے بعد دولت اور تَفَرُّقے کے بعد محبت عنایت کروں گا، اُنہیں کی بدولت مختلف قبائل، غیر مجتمع خواہشوں اور اختلاف رکھنے والے دلوں میں اُلفت پیدا کروں گا اور اُن کی اُمت کو 
تمام اُمتوں سے بہتر کروں گا۔ ایک اور حدیث میں توریت سے حضور سید المرسلینﷺسے یہ اوصاف منقول ہیں ’’میرے بندے احمد مختار، ان کی جائے ولادت مکہ مکرمہ اور جائے ہجرت مدینہ طیبہ ہے،اُن کی اُمت ہر حال میں اللہ تعالٰی کی کثیر حمد کرنے والی ہے۔ مُنَزَّہٌ عَنْ شَرِیْکٍ فِیْ مَحَاسِنِہٖ", + "بالآخر آنحضرتﷺ کے اس عفو وکرم نے یہ معجزہ دکھایا کہ سہیل حنین کی واپسی کے وقت آپ کے ساتھ ہوگئے اورمقام جعرانہ پہنچ کر خلعتِ اسلام سے سرفراز ہوئے آنحضرت ﷺ نے ازراہ مرحمت حنین کے مالِ غنیمت میں سے سو اونٹ عطا فرمائے، گو فتح مکہ کے بعد کے مسلمانوں کا شمار مؤلفۃ القلوب میں ہے، لیکن سہیل اس زمرہ میں اس حیثیت سے ممتاز ہیں کہ اسلام کے بعد ان سے کوئی بات اسلام کے خلاف ظہور پزیر نہیں ہوئی ،حافظ ابن حجرعسقلانی لکھتے ہیں، کان محمودالا سلام من حین اسلم۔", }; + + public static int[][] EXPECTED_TOKENS = new int[][] { + { + 0, + 910, + 256, + 116, + 897, + 65, + 1039, + 830, + 287, + 993, + 660, + 770, + 67, + 619, + 455, + 802, + 73, + 785, + 993, + 990, + 565, + 666, + 194, + 1049, + 110, + 710, + 397, + 283, + 1073, + 666, + 276, + 79, + 486, + 30, + 959, + 912, + 577, + 571, + 658, + 1080, + 327, + 713, + 993, + 457, + 531, + 455, + 553, + 565, + 666, + 46, + 29, + 302, + 993, + 976, + 415, + 155, + 1050, + 956, + 65, + 441, + 65, + 888, + 84, + 511, + 30, + 547, + 908, + 993, + 174, + 350, + 74, + 454, + 500, + 139, + 1026, + 29, + 716, + 337, + 259, + 74, + 874, + 767, + 716, + 961, + 654, + 668, + 460, + 627, + 845, + 577, + 502, + 59, + 30, + 728, + 546, + 140, + 804, + 659, + 67, + 792, + 716, + 358, + 713, + 993, + 783, + 755, + 330, + 278, + 755, + 925, + 74, + 30, + 871, + 993, + 416, + 767, + 1040, + 713, + 331, + 1016, + 460, + 668, + 419, + 568, + 148, + 326, + 306, + 30, + 440, + 36, + 742, + 398, + 727, + 993, + 389, + 795, + 373, + 1009, + 681, + 577, + 455, + 410, + 246, + 1062, + 29, + 641, + 993, + 788, + 921, + 413, + 483, + 329, + 737, + 331, + 1016, + 413, + 1040, + 713, + 482, + 23, + 29, + 253, + 365, + 489, + 457, + 642, + 29, + 544, + 778, + 1077, + 68, + 27, + 379, + 59, + 
30, + 639, + 965, + 48, + 52, + 851, + 773, + 331, + 1012, + 1076, + 481, + 661, + 461, + 331, + 767, + 166, + 1010, + 285, + 716, + 662, + 999, + 461, + 668, + 132, + 767, + 936, + 67, + 533, + 166, + 929, + 1046, + 677, + 456, + 124, + 30, + 440, + 183, + 954, + 730, + 65, + 716, + 358, + 713, + 993, + 455, + 637, + 748, + 40, + 472, + 149, + 527, + 709, + 29, + 490, + 1077, + 74, + 777, + 629, + 823, + 665, + 367, + 457, + 560, + 417, + 497, + 478, + 888, + 889, + 684, + 821, + 65, + 441, + 65, + 605, + 74, + 679, + 840, + 736, + 150, + 666, + 30, + 479, + 527, + 709, + 94, + 510, + 864, + 455, + 1074, + 667, + 453, + 308, + 74, + 390, + 74, + 647, + 733, + 469, + 265, + 67, + 764, + 30, + 15, + 490, + 993, + 29, + 447, + 971, + 123, + 29, + 501, + 65, + 668, + 559, + 461, + 591, + 737, + 29, + 449, + 233, + 1034, + 16, + 121, + 993, + 428, + 528, + 65, + 474, + 455, + 1056, + 275, + 324, + 29, + 718, + 991, + 717, + 473, + 980, + 668, + 390, + 67, + 716, + 711, + 464, + 224, + 1073, + 666, + 29, + 811, + 990, + 29, + 888, + 616, + 191, + 184, + 768, + 709, + 846, + 62, + 994, + 144, + 30, + 142, + 409, + 976, + 415, + 65, + 326, + 888, + 575, + 543, + 384, + 537, + 17, + 1029, + 668, + 343, + 331, + 1012, + 30, + 422, + 44, + 33, + 1036, + 279, + 67, + 1053, + 976, + 415, + 65, + 326, + 101, + 1029, + 54, + 1027, + 272, + 874, + 65, + 331, + 1016, + 2 }, + { + 0, + 433, + 204, + 360, + 870, + 514, + 962, + 449, + 295, + 962, + 624, + 208, + 497, + 995, + 1071, + 65, + 538, + 412, + 760, + 883, + 592, + 422, + 707, + 858, + 1032, + 422, + 44, + 34, + 875, + 72, + 1032, + 716, + 254, + 896, + 600, + 24, + 873, + 514, + 29, + 695, + 425, + 1071, + 65, + 538, + 412, + 760, + 98, + 896, + 600, + 24, + 273, + 30, + 729, + 960, + 188, + 1001, + 596, + 497, + 497, + 485, + 76, + 178, + 579, + 679, + 914, + 950, + 74, + 459, + 883, + 514, + 686, + 21, + 80, + 741, + 745, + 962, + 781, + 70, + 716, + 1003, + 151, + 455, + 596, + 522, + 638, + 310, + 65, + 1066, + 1020, + 
30, + 2 }, + { + 0, + 716, + 725, + 652, + 77, + 9, + 444, + 463, + 20, + 232, + 10, + 270, + 427, + 886, + 444, + 463, + 20, + 588, + 85, + 4, + 470, + 886, + 692, + 444, + 463, + 22, + 28, + 24, + 71, + 232, + 539, + 100, + 975, + 6, + 146, + 886, + 534, + 4, + 362, + 432, + 122, + 100, + 104, + 90, + 51, + 992, + 39, + 359, + 997, + 32, + 317, + 100, + 292, + 424, + 6, + 716, + 725, + 171, + 582, + 96, + 49, + 58, + 516, + 705, + 100, + 320, + 377, + 968, + 701, + 333, + 86, + 47, + 610, + 886, + 33, + 979, + 115, + 232, + 539, + 731, + 586, + 581, + 1063, + 71, + 664, + 1075, + 6, + 2 }, + { + 0, + 548, + 1013, + 948, + 854, + 215, + 716, + 799, + 867, + 865, + 532, + 953, + 499, + 298, + 758, + 853, + 107, + 819, + 498, + 865, + 314, + 657, + 847, + 274, + 60, + 117, + 395, + 190, + 985, + 402, + 578, + 267, + 352, + 231, + 861, + 154, + 943, + 402, + 271, + 525, + 743, + 135, + 774, + 374, + 590, + 352, + 231, + 274, + 1078, + 117, + 107, + 819, + 498, + 400, + 361, + 282, + 738, + 271, + 439, + 1021, + 849, + 1038, + 274, + 243, + 673, + 93, + 117, + 484, + 797, + 117, + 716, + 200, + 127, + 861, + 825, + 219, + 852, + 402, + 271, + 669, + 854, + 215, + 865, + 923, + 107, + 819, + 498, + 394, + 931, + 361, + 716, + 941, + 11, + 861, + 154, + 943, + 60, + 670, + 402, + 669, + 854, + 215, + 716, + 799, + 861, + 385, + 117, + 1013, + 948, + 854, + 215, + 948, + 838, + 948, + 238, + 91, + 948, + 831, + 963, + 832, + 894, + 108, + 853, + 402, + 525, + 899, + 913, + 12, + 703, + 562, + 1038, + 274, + 900, + 798, + 117, + 554, + 688, + 815, + 117, + 958, + 45, + 117, + 535, + 800, + 782, + 958, + 613, + 117, + 926, + 761, + 117, + 926, + 1008, + 117, + 957, + 1004, + 853, + 117, + 958, + 31, + 207, + 859, + 271, + 525, + 198, + 1014, + 618, + 926, + 406, + 525, + 675, + 211, + 809, + 1048, + 152, + 905, + 689, + 716, + 30, + 525, + 905, + 211, + 412, + 615, + 849, + 854, + 215, + 24, + 706, + 271, + 107, + 819, + 498, + 271, + 657, + 847, + 506, + 5, + 569, + 63, + 
363, + 963, + 832, + 271, + 894, + 108, + 853, + 1022, + 1030, + 378, + 635, + 30, + 669, + 854, + 215, + 716, + 799, + 325, + 651, + 355, + 1052, + 229, + 274, + 813, + 899, + 93, + 117, + 525, + 1059, + 860, + 325, + 506, + 353, + 220, + 891, + 119, + 789, + 669, + 854, + 215, + 716, + 799, + 301, + 63, + 848, + 714, + 550, + 749, + 614, + 865, + 754, + 423, + 849, + 865, + 443, + 274, + 562, + 271, + 301, + 63, + 848, + 693, + 550, + 749, + 614, + 865, + 423, + 849, + 159, + 192, + 612, + 274, + 566, + 608, + 562, + 1038, + 904, + 669, + 854, + 215, + 716, + 799, + 982, + 125, + 898, + 847, + 687, + 744, + 562, + 117, + 368, + 849, + 690, + 578, + 400, + 865, + 720, + 262, + 806, + 933, + 789, + 587, + 536, + 789, + 202, + 861, + 266, + 769, + 60, + 63, + 1043, + 865, + 576, + 977, + 601, + 271, + 190, + 353, + 669, + 854, + 215, + 861, + 369, + 280, + 102, + 861, + 82, + 126, + 964, + 852, + 30, + 669, + 854, + 215, + 716, + 799, + 986, + 849, + 747, + 274, + 407, + 234, + 213, + 607, + 933, + 125, + 891, + 849, + 746, + 274, + 548, + 808, + 294, + 839, + 828, + 852, + 187, + 1018, + 771, + 716, + 766, + 747, + 274, + 19, + 78, + 347, + 933, + 271, + 578, + 451, + 849, + 747, + 820, + 509, + 789, + 548, + 203, + 948, + 716, + 739, + 648, + 716, + 799, + 865, + 772, + 861, + 25, + 274, + 227, + 380, + 672, + 102, + 347, + 933, + 117, + 525, + 735, + 214, + 271, + 952, + 1059, + 860, + 972, + 775, + 402, + 578, + 525, + 315, + 861, + 462, + 529, + 789, + 394, + 715, + 274, + 120, + 861, + 716, + 1058, + 418, + 241, + 824, + 1038, + 865, + 318, + 853, + 756, + 669, + 854, + 215, + 716, + 799, + 189, + 436, + 672, + 816, + 687, + 378, + 635, + 716, + 30, + 2 }, + { + 0, + 268, + 951, + 7, + 716, + 903, + 865, + 584, + 168, + 887, + 229, + 653, + 932, + 421, + 217, + 932, + 386, + 653, + 932, + 865, + 716, + 835, + 143, + 612, + 904, + 593, + 363, + 904, + 411, + 203, + 853, + 21, + 421, + 716, + 835, + 185, + 387, + 81, + 209, + 597, + 865, + 296, + 862, + 901, + 
223, + 1005, + 670, + 437, + 1033, + 8, + 412, + 176, + 24, + 704, + 687, + 782, + 289, + 421, + 716, + 835, + 185, + 776, + 853, + 81, + 716, + 835, + 716, + 466, + 26, + 421, + 656, + 404, + 714, + 550, + 749, + 614, + 865, + 716, + 835, + 716, + 1031, + 853, + 690, + 519, + 902, + 850, + 716, + 835, + 904, + 128, + 958, + 625, + 230, + 421, + 1037, + 225, + 934, + 958, + 625, + 230, + 421, + 1037, + 225, + 934, + 345, + 421, + 716, + 1031, + 942, + 421, + 371, + 550, + 408, + 307, + 951, + 412, + 807, + 24, + 656, + 404, + 714, + 550, + 749, + 614, + 865, + 555, + 219, + 230, + 690, + 491, + 716, + 835, + 904, + 924, + 958, + 625, + 230, + 307, + 951, + 412, + 810, + 24, + 656, + 404, + 714, + 550, + 749, + 614, + 865, + 396, + 865, + 125, + 1002, + 636, + 690, + 519, + 902, + 850, + 716, + 835, + 904, + 125, + 1002, + 636, + 307, + 951, + 412, + 815, + 24, + 206, + 312, + 791, + 932, + 904, + 617, + 307, + 951, + 2 }, + { + 0, + 349, + 948, + 927, + 186, + 861, + 556, + 779, + 763, + 83, + 112, + 180, + 779, + 1000, + 496, + 750, + 784, + 521, + 967, + 263, + 435, + 112, + 180, + 322, + 1069, + 967, + 83, + 322, + 179, + 611, + 714, + 550, + 749, + 614, + 133, + 762, + 321, + 967, + 841, + 780, + 493, + 83, + 263, + 1051, + 356, + 465, + 515, + 555, + 779, + 1081, + 344, + 779, + 1061, + 133, + 939, + 321, + 782, + 915, + 589, + 922, + 83, + 556, + 682, + 763, + 420, + 694, + 555, + 442, + 820, + 644, + 791, + 197, + 342, + 247, + 814, + 1017, + 685, + 782, + 915, + 480, + 35, + 915, + 645, + 133, + 1041, + 552, + 967, + 106, + 623, + 357, + 622, + 83, + 526, + 442, + 245, + 1024, + 197, + 1067, + 932, + 83, + 988, + 162, + 632, + 83, + 556, + 779, + 545, + 698, + 934, + 83, + 814, + 1072, + 2 }, + { + 0, + 335, + 316, + 779, + 467, + 572, + 656, + 1015, + 693, + 550, + 749, + 614, + 779, + 917, + 43, + 779, + 555, + 83, + 239, + 372, + 133, + 430, + 1023, + 944, + 291, + 1079, + 782, + 893, + 996, + 245, + 719, + 808, + 779, + 628, + 934, + 83, + 557, + 133, + 
309, + 332, + 656, + 1015, + 693, + 550, + 749, + 614, + 322, + 545, + 780, + 125, + 517, + 157, + 985, + 595, + 236, + 779, + 589, + 945, + 83, + 197, + 615, + 175, + 732, + 884, + 948, + 134, + 257, + 708, + 83, + 716, + 319, + 740, + 937, + 694, + 966, + 951, + 2 }, + { + 0, + 716, + 328, + 391, + 1070, + 934, + 779, + 338, + 399, + 83, + 496, + 782, + 293, + 83, + 989, + 133, + 564, + 348, + 947, + 177, + 779, + 836, + 949, + 496, + 782, + 716, + 328, + 779, + 523, + 383, + 947, + 177, + 255, + 682, + 836, + 934, + 103, + 782, + 316, + 779, + 216, + 162, + 609, + 199, + 83, + 1081, + 753, + 393, + 602, + 420, + 694, + 434, + 494, + 129, + 498, + 862, + 248, + 261, + 35, + 13, + 1057, + 316, + 779, + 475, + 656, + 1015, + 693, + 550, + 749, + 614, + 322, + 697, + 136, + 163, + 197, + 650, + 942, + 245, + 817, + 180, + 780, + 113, + 906, + 723, + 2 }, + { + 0, + 339, + 722, + 145, + 196, + 740, + 899, + 244, + 92, + 492, + 55, + 299, + 247, + 680, + 714, + 550, + 749, + 614, + 780, + 818, + 182, + 567, + 796, + 520, + 247, + 787, + 205, + 779, + 583, + 391, + 1048, + 682, + 890, + 236, + 899, + 281, + 674, + 1006, + 911, + 160, + 879, + 83, + 321, + 782, + 303, + 716, + 909, + 948, + 250, + 346, + 716, + 790, + 934, + 860, + 236, + 303, + 173, + 948, + 540, + 716, + 790, + 974, + 118, + 863, + 815, + 862, + 368, + 693, + 550, + 749, + 614, + 779, + 340, + 947, + 313, + 724, + 726, + 861, + 351, + 157, + 83, + 928, + 521, + 787, + 594, + 322, + 699, + 429, + 626, + 99, + 780, + 341, + 18, + 827, + 541, + 860, + 958, + 585, + 182, + 693, + 550, + 749, + 614, + 779, + 861, + 947, + 313, + 407, + 862, + 477, + 83, + 130, + 521, + 603, + 83, + 133, + 448, + 947, + 313, + 351, + 157, + 83, + 928, + 880, + 915, + 779, + 475, + 594, + 322, + 201, + 111, + 347, + 193, + 37, + 842, + 75, + 18, + 69, + 322, + 907, + 221, + 162, + 159, + 860, + 807, + 859, + 826, + 323, + 819, + 197, + 161, + 817, + 169, + 197, + 125, + 860, + 848, + 323, + 942, + 300, + 255, + 682, + 716, + 
218, + 934, + 222, + 381, + 694, + 526, + 712, + 782, + 786, + 573, + 382, + 949, + 197, + 573, + 179, + 66, + 782, + 83, + 322, + 786, + 934, + 877, + 264, + 580, + 604, + 1042, + 782, + 228, + 938, + 476, + 66, + 442, + 468, + 41, + 782, + 442, + 235, + 255, + 83, + 1045, + 114, + 492, + 56, + 66, + 442, + 940, + 765, + 133, + 940, + 765, + 899, + 561, + 492, + 56, + 471, + 260, + 643, + 255, + 899, + 973, + 784, + 66, + 197, + 945, + 35, + 1064, + 366, + 66, + 782, + 611, + 304, + 882, + 907, + 442, + 1047, + 702, + 791, + 818, + 1028, + 967, + 503, + 452, + 133, + 872, + 195, + 249, + 899, + 915, + 50, + 95, + 442, + 446, + 895, + 967, + 599, + 164, + 162, + 431, + 779, + 1000, + 626, + 170, + 859, + 803, + 806, + 862, + 847, + 882, + 934, + 859, + 125, + 862, + 847, + 290, + 806, + 611, + 860, + 159, + 860, + 812, + 859, + 848, + 290, + 815, + 855, + 143, + 290, + 824, + 860, + 852, + 866, + 847, + 860, + 611, + 862, + 450, + 105, + 236, + 392, + 247, + 197, + 503, + 938, + 602, + 945, + 805, + 288, + 655, + 806, + 197, + 869, + 935, + 690, + 1007, + 158, + 412, + 885, + 236, + 56, + 24, + 197, + 35, + 815, + 255, + 83, + 258, + 1024, + 1079, + 1048, + 691, + 620, + 934, + 876, + 951, + 197, + 237, + 981, + 920, + 192, + 790, + 974, + 984, + 676, + 862, + 458, + 714, + 550, + 749, + 614, + 671, + 969, + 83, + 351, + 157, + 354, + 682, + 342, + 563, + 526, + 251, + 918, + 321, + 967, + 14, + 14, + 611, + 304, + 882, + 322, + 680, + 714, + 550, + 749, + 614, + 671, + 445, + 83, + 699, + 967, + 14, + 14, + 83, + 125, + 860, + 649, + 57, + 388, + 779, + 881, + 694, + 255, + 791, + 782, + 197, + 57, + 131, + 860, + 88, + 862, + 477, + 97, + 446, + 644, + 791, + 782, + 678, + 862, + 549, + 197, + 162, + 405, + 899, + 125, + 860, + 849, + 682, + 334, + 948, + 700, + 791, + 197, + 138, + 1044, + 401, + 878, + 633, + 899, + 945, + 682, + 53, + 694, + 255, + 370, + 930, + 882, + 899, + 945, + 682, + 137, + 311, + 782, + 1055, + 899, + 945, + 682, + 109, + 782, + 164, + 
162, + 716, + 570, + 899, + 125, + 860, + 849, + 780, + 61, + 117, + 558, + 852, + 574, + 899, + 125, + 860, + 849, + 780, + 634, + 117, + 305, + 899, + 945, + 780, + 542, + 808, + 782, + 438, + 862, + 892, + 899, + 125, + 860, + 849, + 780, + 507, + 782, + 504, + 899, + 125, + 860, + 849, + 682, + 548, + 197, + 316, + 899, + 125, + 860, + 849, + 780, + 249, + 948, + 700, + 370, + 505, + 125, + 860, + 849, + 682, + 2 }, + { + 0, + 212, + 495, + 656, + 1015, + 693, + 550, + 749, + 614, + 779, + 915, + 716, + 558, + 162, + 595, + 848, + 322, + 342, + 243, + 721, + 934, + 1019, + 153, + 967, + 112, + 180, + 716, + 739, + 182, + 780, + 640, + 779, + 361, + 680, + 779, + 1000, + 518, + 197, + 848, + 426, + 135, + 987, + 284, + 414, + 694, + 1037, + 983, + 862, + 316, + 133, + 752, + 1079, + 656, + 1015, + 714, + 550, + 749, + 614, + 322, + 916, + 794, + 934, + 159, + 512, + 808, + 716, + 739, + 182, + 779, + 753, + 862, + 970, + 92, + 83, + 133, + 998, + 947, + 177, + 97, + 446, + 702, + 782, + 829, + 978, + 557, + 779, + 475, + 779, + 277, + 682, + 487, + 240, + 87, + 937, + 955, + 837, + 83, + 321, + 782, + 513, + 112, + 180, + 915, + 336, + 83, + 915, + 252, + 133, + 734, + 521, + 967, + 316, + 779, + 475, + 945, + 133, + 181, + 946, + 316, + 779, + 226, + 141, + 934, + 172, + 209, + 822, + 169, + 1035, + 1068, + 117, + 761, + 669, + 364, + 833, + 824, + 42, + 286, + 508, + 521, + 782, + 690, + 663, + 156, + 488, + 251, + 1065, + 915, + 89, + 951, + 2 }, }; + +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTokenizerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTokenizerTests.java index bff2c6a94d789..3fd51601e0138 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTokenizerTests.java +++ 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTokenizerTests.java @@ -124,6 +124,30 @@ public void testMultiByteEmoji() throws IOException { } } + public void testMultilingual() throws IOException { + var vocab = XLMRobertaTestVocab.loadMultiLingualTestVocab(); + + try ( + XLMRobertaTokenizer tokenizer = XLMRobertaTokenizer.builder( + vocab.get(), + vocab.scores(), + new XLMRobertaTokenization(false, null, Tokenization.Truncate.NONE, -1) + ).setWithSpecialTokens(true).build() + ) { + for (int i = 0; i < XLMRobertaTestVocab.MULTILINUGAL_TEXTS.length; i++) { + logger.info(i); + TokenizationResult.Tokens tokenization = tokenizer.tokenize( + XLMRobertaTestVocab.MULTILINUGAL_TEXTS[i], + Tokenization.Truncate.FIRST, + -1, + 0, + null + ).get(0); + assertArrayEquals(XLMRobertaTestVocab.EXPECTED_TOKENS[i], tokenization.tokenIds()); + } + } + } + public void testTokenizeWithNeverSplit() throws IOException { try ( XLMRobertaTokenizer tokenizer = XLMRobertaTokenizer.builder( diff --git a/x-pack/plugin/ml/src/test/resources/org/elasticsearch/xpack/ml/inference/tokenizers/xlm_roberta_test_vocabulary.json b/x-pack/plugin/ml/src/test/resources/org/elasticsearch/xpack/ml/inference/tokenizers/xlm_roberta_test_vocabulary.json new file mode 100644 index 0000000000000..99f7d2ae34a04 --- /dev/null +++ b/x-pack/plugin/ml/src/test/resources/org/elasticsearch/xpack/ml/inference/tokenizers/xlm_roberta_test_vocabulary.json @@ -0,0 +1,2172 @@ +{ + "model_id": "anything_to_satisfy_the_parser", + "vocabulary": [ + "", + "", + "", + "", + "、", + "▁نسبت", + "。", + "▁ھن", + "▁ھو", + "「", + "」", + "فض", + "▁سار", + "▁پہنچے", + "’", + "▁Nakon", + "nova", + "jena", + "!", + "▁حديث", + "工業", + "'", + "(", + "▁soba", + ")", + "▁شوق", + "في", + "▁pod", + "株", + ",", + ".", + "قر", + "進", + "3", + "8", + "▁پر", + "▁pot", + ":", + "▁مختار", + "格", + "▁pos", + "▁مزاج", + "قل", + "قم", + "⁄", + "فسر", + "ću", + "く", + "▁bila", + "あげ", + "▁طرح", + "し", + 
"▁vrhunski", + "▁شعار", + "▁jednu", + "▁والی", + "▁والے", + "▁ہر", + "た", + "a", + "▁ٿي", + "▁طبیعت", + "b", + "▁ڪري", + "▁حدیث", + "e", + "▁ہو", + "g", + "h", + "▁ہم", + "k", + "に", + "l", + "ština", + "m", + "▁نبی", + "n", + "の", + "▁لکڻ", + "s", + "t", + "▁سان", + "▁سبب", + "▁میں", + "▁lit", + "へ", + "べ", + "لف", + "لق", + "لم", + "や", + "ثير", + "مت", + "لي", + "▁pri", + "▁راست", + "り", + "▁عطا", + "▁66", + "خدا", + "を", + "▁kuća", + "▁ڪرڻ", + "▁تھا", + "増", + "▁پکار", + "▁طا", + "▁طب", + "مل", + "▁راز", + "▁prenosi", + "نا", + "▁سہ", + "▁طرف", + "▁بلند", + "英文", + "vne", + "▁،", + "▁سی", + "ائين", + "▁زمين", + "▁zbog", + "点", + "▁stoljeća", + "jeti", + "▁ا", + "▁ب", + "ني", + "▁امير", + "▁آ", + "▁آئے", + "▁خ", + "vod", + "▁سے", + "▁ت", + "▁ج", + "▁ص", + "▁ض", + "▁ط", + "čenja", + "buku", + "▁ظ", + "▁90", + "▁ر", + "vom", + "▁ز", + "▁1990", + "▁ش", + "vor", + "ena", + "ologi", + "▁yn", + "الت", + "یا", + "▁ع", + "ene", + "الا", + "یت", + "وا", + "▁م", + "یح", + "▁ن", + "▁و", + "ید", + "▁صدق", + "▁عن", + "▁za", + "▁ف", + "▁ق", + "یر", + "▁ل", + "者が", + "ور", + "▁عبداللہ", + "▁obra", + "وع", + "الف", + "نٹ", + "ô", + "▁رسول", + "یل", + "▁کوئی", + "ین", + "▁zi", + "uncu", + "▁ٻي", + "لہ", + "▁انجام", + "▁fan", + "▁پنهنجو", + "▁اهو", + "▁vrh", + "بار", + "▁کیا", + "ć", + "▁مستقیم", + "لی", + "▁اور", + "▁اهي", + "لے", + "باط", + "▁پڑھ", + "▁اوهان", + "احمد", + "isto", + "▁ہاتھوں", + "▁پنھنجي", + "يء", + "▁lân", + "▁پ", + "مہ", + "يت", + "▁بال", + "يد", + "▁خرچ", + "ير", + "▁جذب", + "▁لا", + "نگ", + "وار", + "▁ڏ", + "▁شاہد", + "بان", + "▁قرار", + "vanja", + "يف", + "▁خلاف", + "▁بغداد", + "نہ", + "اني", + "ين", + "يه", + "社", + "▁njihova", + "▁مج", + "▁بازار", + "نے", + "▁ک", + "▁ڪ", + "▁تشریف", + "▁مؤ", + "▁ملي", + "ایت", + "▁مع", + "▁ملا", + "▁ہ", + "▁plod", + "یں", + "▁نب", + "▁ملت", + "▁ی", + "▁من", + "▁حیثیت", + "▁ali", + "660", + "وں", + "đe", + "یہ", + "▁لپ", + "đi", + "▁خطا", + "وی", + "▁پڇ", + "▁ایک", + "▁نام", + "šnje", + "▁پٽ", + "▁سلطنت", + "▁۱", + 
"▁کثیر", + "から", + "▁۽", + "▁dvije", + "▁breed", + "▁۾", + "▁200", + "▁Ana", + "▁مسلمانوں", + "oče", + "▁ukupno", + "▁حاصل", + "▁ترجیح", + "▁پڻ", + "ama", + "انہ", + "tru", + "انی", + "▁dokaz", + "▁آنکھیں", + "▁لکي", + "َّ", + "سلام", + "始", + "▁عرب", + "▁جهڙو", + "cre", + "▁لاء", + "نام", + "▁224", + "▁عورت", + "ُّ", + "▁نبي", + "▁kada", + "▁حضرت", + "▁تعالی", + "▁عدل", + "▁metara", + "▁چئي", + "▁moguće", + "▁مدینہ", + "plo", + "میر", + "▁عبادت", + "صاف", + "▁شهر", + "▁والد", + "▁اسلام", + "出", + "▁آمدن", + "موا", + "世界", + "▁ہے", + "▁نے", + "ِّ", + "▁godina", + "▁هڪ", + "nih", + "▁ostvari", + "ضرار", + "jal", + "▁razm", + "▁prostor", + "▁گئے", + "させる", + "▁لباس", + "▁دعوت", + "▁زمرہ", + "tvr", + "▁اصحاب", + "▁(2)", + "▁وہ", + "▁قسم", + "▁یہ", + "▁deset", + "▁عمر", + "▁المسلمين", + "سار", + "▁شروع", + "▁دولت", + "▁سعد", + "đeni", + "▁تور", + "▁عباس", + "▁خواب", + "▁شریف", + "ودي", + "ڑی", + "▁عمو", + "stavlja", + "的な", + "ary", + "▁وقت", + "海外", + "▁پاڻ", + "▁حجر", + "▁uko", + "▁فرماتے", + "▁pomaže", + "▁عالم", + "▁علم", + "▁گا۔", + "▁رضي", + "▁لانے", + "tili", + "▁عهد", + "▁مخلوق", + "▁ہے۔", + "企業", + "ندا", + "rum", + "▁ڏانهن", + "▁بنا", + "▁بند", + "▁ہزار", + "▁ispod", + "▁نالو", + "وري", + "▁نالي", + "▁خوبی", + "▁sklad", + "▁konačno", + "▁اپنے", + "▁لگ", + "▁ود", + "▁ور", + "▁هي", + "▁خاندان", + "▁generacij", + "lji", + "▁ثروت", + "▁هن", + "▁وا", + "▁هو", + "▁معرفت", + "حضور", + "قار", + "قاب", + "▁قرآن", + "▁عنه", + "▁%", + "▁druge", + "▁'", + "▁(", + "▁-", + "▁پہنچ", + "građ", + "▁materijal", + "nom", + "ڙو", + "▁6", + "▁چھوڑ", + "▁يا", + "▁1", + "▁هٿ", + "動", + "▁4", + "قام", + "1959", + "šne", + "▁:", + "▁پہلے", + "▁یقین", + "拠", + "▁H", + "▁خالی", + "▁مرتبہ", + "▁گذر", + "▁آھي", + "▁اظہار", + "▁مذهبي", + "▁U", + "▁građevin", + "▁نہ", + "▁وچ", + "太平洋", + "▁صفت", + "▁فرما", + "▁tijekom", + "▁بعض", + "▁a", + "‘‘", + "▁سوره", + "▁برکت", + "▁o", + "▁p", + "▁i", + "▁l", + "▁u", + "▁رسالت", + "jit", + "▁veličine", + "▁s", + "▁انتقال", + "化学", + "ša", + "▁صرف", + 
"خلي", + "▁آغاز", + "▁سخت", + "▁onda", + "1987", + "▁بلکہ", + "ječ", + "▁puno", + "▁traje", + "▁بعد", + "خلق", + "▁کریم", + "iranju", + "▁Ta", + "▁عہد", + "log", + "▁ispred", + "▁ceremoni", + "▁خوا", + "▁liker", + "azi", + "▁شمار", + "▁سلام", + "pane", + "▁što", + "▁علاوه", + "▁کرنے", + "▁راہ", + "▁ہاتھ", + "آخر", + "▁تھے", + "▁dat", + "ستان", + "عادت", + "ješ", + "▁šum", + "▁kruh", + "▁تمہاری", + "▁ہدایت", + "▁احمد", + "▁ڏينهن", + "▁شریعت", + "▁لکھتے", + "▁ٿيا", + "dono", + "icama", + "رحم", + "▁لیکن", + "▁wie", + "▁کرنا", + "化粧品", + "جنب", + "▁ہوگئے", + "▁علاوہ", + "▁پات", + "▁ہیں", + "▁dei", + "▁پاس", + "▁حال", + "▁سندس", + "▁بھی", + "▁stab", + "▁klim", + "▁ٿيو", + "▁محبت", + "▁12.", + "▁ولادت", + "▁zraka", + "以降", + "▁المو", + "▁چيو", + "▁sti", + "den", + "名", + "▁عمرو", + "▁سید", + "▁سیر", + "bina", + "▁nešto", + "▁غربت", + "▁ž", + "▁Zi", + "▁امام", + "▁قلب", + "▁الله", + "▁قلت", + "▁رہے", + "▁13.", + "▁المج", + "▁گھر", + "▁شام", + "▁مکہ", + "عفو", + "▁kraja", + "▁preciz", + "▁دفع", + "▁آهن", + "▁مضمون", + "▁بڑی", + "▁stol", + "▁پٿر", + "▁انور", + "▁čet", + "▁هجڻ", + "وفا", + "▁svojih", + "▁یعنی", + "▁میرے", + "▁کرم", + "seo", + "▁خدمت", + "ce", + "▁جڏهن", + "ch", + "▁متو", + "ACI", + "つく", + "▁بجائے", + "▁مطابق", + "رسل", + "REP", + "▁انهن", + "株式会社", + "▁لیے", + "▁خلافت", + "ruše", + "wol", + "▁جيڪو", + "▁انہوں", + "کر", + "en", + "ڪاري", + "▁مختلف", + "▁لوگ", + "et", + "▁ڏيندو", + "ولت", + "▁انہیں", + "کل", + "▁metodo", + "▁منزل", + "▁حفظ", + "يون", + "ولو", + "2002", + "▁اللہ", + "ڪن", + "حدث", + "▁وسلم", + "اب", + "▁svojem", + "▁مسجد", + "ئي", + "▁znanja", + "اد", + "▁پیدا", + "اس", + "عون", + "▁stik", + "ومن", + "▁’’", + "▁št", + "▁زمان", + "he", + "▁قبائل", + "ال", + "با", + "ان", + "▁عادت", + "▁هئا", + "▁بيت", + "▁lokal", + "▁be", + "▁Kiva", + "▁واپسی", + "▁dok", + "▁zemlju", + "▁کار", + "ؤں", + "▁سختی", + "▁طیب", + "▁slo", + "بل", + "نہیں", + "بن", + "▁آس", + "当初", + "▁گرو", + "im", + "▁بین", + "▁آن", + "▁آم", + "▁dos", + "▁ci", + "▁akumul", + 
"▁oblika", + "zimi", + "▁محمود", + "改", + "zima", + "je", + "ji", + "▁do", + "▁ابن", + "▁ويو", + "کی", + "▁سفر", + "تز", + "▁دیتی", + "▁بزرگ", + "کار", + "ju", + "▁اطمینان", + "▁de", + "▁آپ", + "tari", + "▁کا", + "لفت", + "ka", + "گا", + "▁dy", + "ائي", + "ته", + "▁ٿيون", + "▁کان", + "▁کش", + "には", + "صلى", + "▁کر", + "▁en", + "ثر", + "▁بنی", + "▁کد", + "▁فرمایا", + "اؤں", + "成長", + "ئے", + "▁لقب", + "▁ڳالھ", + "会社", + "▁هئي", + "▁fur", + "اں", + "la", + "le", + "uru", + "جا", + "li", + "▁صلى", + "ثي", + "▁", + "lo", + "▁nije", + "جر", + "▁تعبير", + "جز", + "▁اگر", + "ا۔", + "▁دریافت", + "創業", + "▁کئے", + "ma", + "▁Kao", + "▁Men", + "▁konstrukcij", + "AMO", + "بی", + "mu", + "▁ممتاز", + "▁سڄو", + "kron", + "ne", + "▁سياست", + "حن", + "خا", + "▁men", + "krov", + "▁سڄي", + "▁رهيا", + "▁mei", + "▁سالم", + "▁سالن", + "no", + "▁عليه", + "▁بیان", + "▁گمراہ", + "▁سرفراز", + "▁مال", + "▁ٻنهي", + "nu", + "▁مان", + "▁رفع", + "▁هجر", + "▁اختلاف", + "of", + "حافظ", + "▁سنا", + "▁جہاد", + "▁društva", + "ائی", + "نون", + "om", + "▁osjeća", + "▁وڏو", + "irano", + "ئين", + "▁زيارت", + "irana", + "مار", + "دو", + "▁طريق", + "▁ar", + "▁poput", + "▁کے", + "▁کی", + "▁jo", + "،", + "▁gli", + "▁کرتے", + "▁koje", + "▁تم", + "▁تو", + "▁ki", + "▁ته", + "ؓ", + "▁گا", + "lā", + "جہ", + "را", + "iš", + "▁دیکھ", + "رج", + "مام", + "رح", + "رخ", + "▁تع", + "▁vje", + "آ", + "▁između", + "دھی", + "ا", + "ب", + "ت", + "▁تي", + "ج", + "▁koja", + "ح", + "▁اک", + "▁جان", + "د", + "▁فرم", + "ذ", + "▁جب", + "ر", + "▁جا", + "▁pojedin", + "ز", + "olo", + "س", + "▁گه", + "ش", + "▁حضور", + "ض", + "▁گو", + "▁Verde", + "▁غالب", + "ري", + "ع", + "▁گم", + "ڪنھن", + "▁گل", + "لوب", + "▁يزيد", + "▁فري", + "ndro", + "▁خدا", + "▁اے", + "ف", + "ق", + "ru", + "▁nepo", + "ل", + "م", + "ن", + "ه", + "▁dizajn", + "و", + "ي", + "▁جر", + "ٌ", + "▁جائے", + "ٍ", + "long", + "َ", + "ُ", + "▁جو", + "ِ", + "ّ", + "se", + "▁جي", + "ْ", + "▁جن", + "ٖ", + "▁بہ", + "sk", + "▁Tim", + "▁غیر", + "▁lang", + "▁kamen", + "▁my", + 
"▁ہوجائیں", + "ارا", + "▁حس", + "▁بخاری", + "▁ہیں۔", + "▁قابل", + "ٰ", + "te", + "دہ", + "سن", + "年", + "ادي", + "▁na", + "stan", + "ٹ", + "ٺ", + "▁حق", + "پ", + "▁الآ", + "دے", + "▁fo", + "▁struktur", + "ڇ", + "▁کو", + "▁الا", + "▁ممنوع", + "ڍ", + "شق", + "▁کي", + "▁ڪن", + "▁بھیج", + "▁تمہیں", + "dali", + "عطاء", + "▁Gra", + "صح", + "un", + "ڙ", + "▁gr", + "▁اس", + "▁از", + "▁ار", + "قول", + "ک", + "▁اح", + "ve", + "▁اب", + "▁ٻيهر", + "▁ام", + "▁vodo", + "▁ال", + "▁فضا", + "▁مذکور", + "▁klimat", + "▁تقوی", + "ڻ", + "ھ", + "▁ڪيائين", + "ہ", + "رے", + "▁hladno", + "ۃ", + "▁بد", + "▁بہتر", + "▁بر", + "روا", + "ی", + "روج", + "▁با", + "▁ان", + "▁بات", + "▁او", + "▁بن", + "ے", + "ûn", + "۔", + "▁بار", + "▁باس", + "dane", + "▁الق", + "▁najpoznatij", + "▁الل", + "▁الم", + "▁Vrh", + "▁is", + "oblikovan", + "▁in", + "▁الطب", + "ڻي", + "▁je", + "ادی", + "▁کہ", + "へと", + "▁صفات", + "▁غنی", + "▁nekoliko", + "▁کڻ", + "▁معاف", + "سے", + "変更", + "▁sa", + "▁سرانجام", + "▁فتح", + "月", + "▁vremena", + "عب", + "▁کڻي", + "عت", + "▁سر", + "▁دور", + "▁ست", + "عر", + "▁اسی", + "▁سب", + "▁se", + "▁protek", + "本", + "▁su", + "jedi", + "▁40", + "ھر", + "国際", + "▁سو", + "▁te", + "▁ساتھ", + "▁tink", + "ھل", + "oksen", + "غو", + "▁ڏنو", + "ں۔", + "▁شن", + "علامه", + "▁ži", + "▁va", + "حمد", + "ija", + "▁محمد", + "▁سڀ", + "حضرت", + "ije", + "▁دون", + "▁ڏنا", + "▁دکھا", + "▁koe", + "▁حلق", + "▁سڏ", + "▁مشرف", + "ٹے", + "▁خواہش", + "kom", + "▁ili", + "▁تک", + "▁ima", + "رائي", + "صحاب", + "▁of", + "▁تہ", + "▁ob", + "▁نہیں", + "▁od", + "▁خل", + "▁جنهن", + "▁Mesa", + "▁gradi", + "▁قائم", + "▁رکھا", + "▁دين", + "اعات", + "▁آواز", + "izaci", + "▁اٹھا", + "▁دل", + "ima", + "▁danas", + "▁گھ", + "▁گهر", + "▁broja", + "▁رکھنے", + "▁حکمت", + "▁po", + "▁قبول", + "ٽڪ", + "▁پيء", + "▁مجتمع", + "▁اعمال", + "ine", + "FIC", + "▁احسان", + "▁حین", + "gje", + "▁18", + "▁ہوئی", + "▁کہا", + "▁قبیل", + "▁ro", + "▁دی۔", + "▁civilizaci", + "▁teori", + "めた", + "▁okrug", + "▁današnji", + "▁گذري", + "▁ہوئے", + "ega", + 
"▁تمام" + ], + "scores": [ + 0.0, + 0.0, + 0.0, + 0.0, + -6.610896110534668, + -11.903949737548828, + -6.411019802093506, + -13.111821174621582, + -12.475632667541504, + -8.94989013671875, + -8.913808822631836, + -12.612136840820312, + -13.197681427001953, + -14.200822830200195, + -6.379403591156006, + -12.10725212097168, + -11.451247215270996, + -12.569819450378418, + -6.61658239364624, + -12.716913223266602, + -12.647109031677246, + -6.345553398132324, + -7.722129821777344, + -13.328119277954102, + -5.9974517822265625, + -13.542387008666992, + -11.525911331176758, + -9.303495407104492, + -13.15868091583252, + -3.4635426998138428, + -3.625642776489258, + -12.082132339477539, + -11.200728416442871, + -8.533885955810547, + -9.478791236877441, + -8.830430030822754, + -9.85542106628418, + -5.629745960235596, + -10000.0, + -11.07493782043457, + -10.675272941589355, + -14.053406715393066, + -12.350106239318848, + -13.201828002929688, + -15.362364768981934, + -14.316963195800781, + -11.72597599029541, + -10.451481819152832, + -10.200002670288086, + -12.970951080322266, + -10.799960136413574, + -9.750066757202148, + -14.09317684173584, + -13.132211685180664, + -12.164549827575684, + -11.790772438049316, + -11.185127258300781, + -11.52169418334961, + -10.283937454223633, + -5.5477118492126465, + -10.650121688842773, + -13.610538482666016, + -8.289443016052246, + -10.517338752746582, + -10000.0, + -5.701941967010498, + -10.288777351379395, + -7.932966709136963, + -7.701241970062256, + -10.9612398147583, + -7.4715776443481445, + -8.438796997070312, + -7.762022495269775, + -13.653663635253906, + -6.647110939025879, + -13.096002578735352, + -6.093497276306152, + -7.835560321807861, + -13.766554832458496, + -5.072621822357178, + -6.071900844573975, + -10.290907859802246, + -11.627830505371094, + -8.404854774475098, + -11.820650100708008, + -11.6625394821167, + -12.946660041809082, + -11.946572303771973, + -13.20298957824707, + -12.11659049987793, + -9.643321990966797, + 
-12.949349403381348, + -11.78995132446289, + -10.989119529724121, + -8.787092208862305, + -12.648849487304688, + -10.379737854003906, + -13.063958168029785, + -11.535991668701172, + -13.839150428771973, + -8.22523021697998, + -13.274272918701172, + -11.137674331665039, + -10.805622100830078, + -12.990604400634766, + -14.285995483398438, + -13.078483581542969, + -12.852004051208496, + -11.508638381958008, + -12.764389991760254, + -13.693453788757324, + -9.525500297546387, + -13.369109153747559, + -10.901957511901855, + -12.365242004394531, + -12.302881240844727, + -12.062744140625, + -9.150372505187988, + -10.726777076721191, + -12.626052856445312, + -12.744816780090332, + -11.537252426147461, + -10.271102905273438, + -13.577858924865723, + -12.193032264709473, + -9.33310604095459, + -9.089756965637207, + -10.834887504577637, + -13.551883697509766, + -10.807448387145996, + -12.546935081481934, + -10.950186729431152, + -11.474028587341309, + -8.803434371948242, + -9.171648025512695, + -10.806365966796875, + -10.984315872192383, + -12.26717758178711, + -11.871655464172363, + -13.023716926574707, + -13.473764419555664, + -13.253439903259277, + -10.311766624450684, + -10.744394302368164, + -12.47635269165039, + -11.38111400604248, + -11.568384170532227, + -10000.0, + -10.497817039489746, + -10.765369415283203, + -11.620940208435059, + -8.651301383972168, + -12.216012001037598, + -11.396681785583496, + -10.353537559509277, + -9.594635963439941, + -12.703508377075195, + -10.891910552978516, + -9.981459617614746, + -9.07016372680664, + -13.325227737426758, + -9.9458646774292, + -7.174049377441406, + -10.452103614807129, + -13.543808937072754, + -10000.0, + -7.37307596206665, + -10000.0, + -10.899341583251953, + -11.181215286254883, + -9.23928451538086, + -13.12946605682373, + -10.758359909057617, + -14.190896987915039, + -10.881155967712402, + -11.799145698547363, + -13.552739143371582, + -12.35738754272461, + -10.790441513061523, + -11.642875671386719, + 
-11.203944206237793, + -11.164298057556152, + -10.391376495361328, + -10.602131843566895, + -13.296408653259277, + -12.801287651062012, + -13.29976749420166, + -11.127630233764648, + -9.635873794555664, + -13.025283813476562, + -11.522773742675781, + -12.629497528076172, + -11.675955772399902, + -10.100441932678223, + -9.490818977355957, + -12.654541015625, + -11.304871559143066, + -8.778549194335938, + -12.341999053955078, + -12.693137168884277, + -12.534963607788086, + -12.560155868530273, + -12.807035446166992, + -14.408479690551758, + -10.91116714477539, + -13.606574058532715, + -13.693682670593262, + -11.006491661071777, + -13.511610984802246, + -10.69263744354248, + -10000.0, + -10.88202953338623, + -9.945484161376953, + -11.082690238952637, + -13.169434547424316, + -10.761433601379395, + -12.539514541625977, + -9.714284896850586, + -10.531815528869629, + -11.411252975463867, + -12.159621238708496, + -13.439103126525879, + -12.159096717834473, + -10.569905281066895, + -11.485320091247559, + -11.685418128967285, + -11.131010055541992, + -13.32590389251709, + -12.843395233154297, + -11.425615310668945, + -9.176533699035645, + -10.86955738067627, + -11.128808975219727, + -13.251603126525879, + -12.20699405670166, + -11.551314353942871, + -10.626527786254883, + -11.38455581665039, + -11.614538192749023, + -14.187246322631836, + -12.982544898986816, + -11.797250747680664, + -10000.0, + -9.858101844787598, + -12.285886764526367, + -12.553010940551758, + -13.370101928710938, + -10.696676254272461, + -12.74817180633545, + -12.134454727172852, + -11.036406517028809, + -8.165318489074707, + -13.548136711120605, + -9.375162124633789, + -13.292466163635254, + -9.353793144226074, + -11.82857894897461, + -11.406195640563965, + -13.611187934875488, + -12.325207710266113, + -13.719786643981934, + -11.11467170715332, + -13.631454467773438, + -9.855673789978027, + -10.353020668029785, + -13.05349349975586, + -13.028356552124023, + -13.965872764587402, + -12.046480178833008, + 
-10000.0, + -9.703826904296875, + -9.335156440734863, + -12.247420310974121, + -13.311925888061523, + -9.245621681213379, + -9.983458518981934, + -10.990195274353027, + -12.795949935913086, + -13.135777473449707, + -13.378888130187988, + -11.403210639953613, + -13.6084566116333, + -12.680025100708008, + -10.440314292907715, + -12.222440719604492, + -11.42122745513916, + -11.383726119995117, + -12.9137601852417, + -14.476696968078613, + -13.467201232910156, + -12.135478973388672, + -12.71440601348877, + -12.663864135742188, + -11.900956153869629, + -13.902737617492676, + -11.216065406799316, + -10.742656707763672, + -10000.0, + -13.595476150512695, + -12.485321044921875, + -13.868348121643066, + -13.327157020568848, + -11.195040702819824, + -11.418901443481445, + -12.40562915802002, + -13.329121589660645, + -13.79185962677002, + -13.747245788574219, + -12.834506034851074, + -14.360300064086914, + -11.659099578857422, + -12.826003074645996, + -13.300061225891113, + -13.438057899475098, + -10.775205612182617, + -11.984052658081055, + -11.038277626037598, + -9.496801376342773, + -13.454340934753418, + -12.906173706054688, + -10.528743743896484, + -9.034796714782715, + -9.208064079284668, + -13.150303840637207, + -11.386055946350098, + -10.825066566467285, + -9.702229499816895, + -13.07643985748291, + -13.973017692565918, + -10.952235221862793, + -13.427935600280762, + -11.593998908996582, + -11.631296157836914, + -12.894272804260254, + -12.372183799743652, + -12.643047332763672, + -13.63158893585205, + -12.459877967834473, + -14.055868148803711, + -9.961138725280762, + -10.343199729919434, + -11.893840789794922, + -10.064606666564941, + -12.285531997680664, + -11.363245010375977, + -13.014981269836426, + -12.707101821899414, + -11.332316398620605, + -11.302778244018555, + -13.055937767028809, + -13.674442291259766, + -11.654837608337402, + -12.616765975952148, + -12.207862854003906, + -11.831457138061523, + -13.68111801147461, + -12.976818084716797, + 
-14.430807113647461, + -12.421124458312988, + -11.50658893585205, + -11.110703468322754, + -10.588051795959473, + -11.868654251098633, + -12.110957145690918, + -13.921645164489746, + -13.405242919921875, + -13.995997428894043, + -14.098799705505371, + -12.004497528076172, + -11.506338119506836, + -12.163915634155273, + -13.178263664245605, + -14.001510620117188, + -12.172317504882812, + -13.293499946594238, + -10000.0, + -10000.0, + -11.053098678588867, + -11.700507164001465, + -9.89022159576416, + -13.137804985046387, + -11.584470748901367, + -11.47780704498291, + -12.676255226135254, + -13.47961711883545, + -13.04322338104248, + -12.198275566101074, + -13.25001335144043, + -12.475261688232422, + -12.113178253173828, + -13.952543258666992, + -10.791595458984375, + -12.600780487060547, + -12.942652702331543, + -11.553879737854004, + -10.698843002319336, + -12.703505516052246, + -13.684463500976562, + -12.352499961853027, + -13.843332290649414, + -10.838998794555664, + -10.505772590637207, + -9.910398483276367, + -10000.0, + -13.492959976196289, + -12.739365577697754, + -12.663825035095215, + -11.77187728881836, + -12.291872024536133, + -9.96491813659668, + -11.795421600341797, + -8.20328426361084, + -5.926211357116699, + -6.1764984130859375, + -12.557028770446777, + -13.187960624694824, + -13.275311470031738, + -9.935545921325684, + -12.842405319213867, + -8.376090049743652, + -12.797646522521973, + -10.724736213684082, + -7.551031589508057, + -12.609972953796387, + -10.906079292297363, + -8.062232971191406, + -12.41627025604248, + -14.217342376708984, + -13.32412338256836, + -7.829031944274902, + -11.670573234558105, + -13.152725219726562, + -13.748356819152832, + -8.923832893371582, + -13.297384262084961, + -13.406578063964844, + -12.794132232666016, + -12.182308197021484, + -12.669015884399414, + -13.825421333312988, + -8.497254371643066, + -13.710942268371582, + -10.821403503417969, + -12.96695327758789, + -13.50233268737793, + -14.252979278564453, + 
-12.690325736999512, + -12.644253730773926, + -11.217681884765625, + -5.530364990234375, + -11.728997230529785, + -14.01970100402832, + -14.183956146240234, + -6.582267761230469, + -9.365259170532227, + -6.0461626052856445, + -7.482025623321533, + -7.319528579711914, + -13.754130363464355, + -12.329744338989258, + -14.684525489807129, + -7.452380657196045, + -12.367213249206543, + -13.013487815856934, + -10.484649658203125, + -11.331403732299805, + -13.430648803710938, + -11.846324920654297, + -11.766498565673828, + -11.94919490814209, + -13.64667797088623, + -12.330714225769043, + -12.643916130065918, + -12.118013381958008, + -12.920206069946289, + -9.852779388427734, + -13.62667465209961, + -12.794713020324707, + -13.23983097076416, + -9.644025802612305, + -13.41153335571289, + -10.862459182739258, + -13.595255851745605, + -12.811727523803711, + -12.114456176757812, + -12.617325782775879, + -12.534378051757812, + -12.435154914855957, + -11.79420280456543, + -13.141073226928711, + -10.228925704956055, + -12.648173332214355, + -10.5259370803833, + -13.075540542602539, + -12.833207130432129, + -12.930810928344727, + -11.625775337219238, + -8.988334655761719, + -11.492377281188965, + -14.40893268585205, + -12.106353759765625, + -13.25969409942627, + -13.190732955932617, + -14.228679656982422, + -13.389674186706543, + -11.702837944030762, + -12.21057415008545, + -13.743621826171875, + -14.52221393585205, + -13.440570831298828, + -12.3108491897583, + -13.243945121765137, + -13.412277221679688, + -11.340847969055176, + -9.651451110839844, + -11.63448429107666, + -14.10894775390625, + -13.651695251464844, + -13.614228248596191, + -12.83806037902832, + -13.661396026611328, + -9.87491512298584, + -9.8951416015625, + -11.809096336364746, + -10000.0, + -11.665786743164062, + -9.869390487670898, + -13.02005672454834, + -13.772218704223633, + -12.269754409790039, + -10000.0, + -10.893101692199707, + -14.356070518493652, + -13.704068183898926, + -13.16357707977295, + 
-13.131183624267578, + -11.631052017211914, + -11.685710906982422, + -8.907776832580566, + -9.99026870727539, + -14.045997619628906, + -12.187337875366211, + -13.198140144348145, + -11.964822769165039, + -12.250166893005371, + -14.266410827636719, + -11.802629470825195, + -11.381916046142578, + -11.162945747375488, + -12.44157886505127, + -9.786359786987305, + -10000.0, + -11.233309745788574, + -11.009201049804688, + -12.883721351623535, + -11.877254486083984, + -12.20182991027832, + -14.277528762817383, + -13.7249755859375, + -12.742781639099121, + -12.661029815673828, + -12.547115325927734, + -10.75460147857666, + -12.907571792602539, + -12.363632202148438, + -11.848713874816895, + -14.31727123260498, + -14.046844482421875, + -12.718457221984863, + -13.913930892944336, + -13.322484016418457, + -12.36288833618164, + -11.979717254638672, + -12.366744041442871, + -13.646872520446777, + -13.255087852478027, + -12.110769271850586, + -8.450657844543457, + -12.003588676452637, + -8.075675010681152, + -13.522724151611328, + -13.485895156860352, + -13.05333423614502, + -13.399734497070312, + -11.368695259094238, + -13.210000038146973, + -13.97340202331543, + -11.725092887878418, + -13.334667205810547, + -10.739959716796875, + -13.98511791229248, + -13.574196815490723, + -12.201776504516602, + -12.244017601013184, + -11.784577369689941, + -11.283102035522461, + -6.465692520141602, + -13.550567626953125, + -10000.0, + -11.969606399536133, + -7.697822093963623, + -14.338610649108887, + -13.137377738952637, + -12.166990280151367, + -11.836442947387695, + -13.03665542602539, + -10000.0, + -12.438132286071777, + -11.183541297912598, + -12.041357040405273, + -12.614006996154785, + -11.056611061096191, + -12.743069648742676, + -12.888111114501953, + -11.329586029052734, + -10.89394760131836, + -13.152234077453613, + -12.330127716064453, + -11.13021469116211, + -12.93645191192627, + -11.057968139648438, + -10000.0, + -10.721052169799805, + -13.35482406616211, + 
-12.192888259887695, + -13.680505752563477, + -11.670418739318848, + -11.871618270874023, + -11.242685317993164, + -9.557723999023438, + -10000.0, + -10000.0, + -11.125853538513184, + -9.22323226928711, + -13.24915599822998, + -11.91252613067627, + -12.721294403076172, + -11.174097061157227, + -7.990510940551758, + -13.991429328918457, + -13.859502792358398, + -11.520356178283691, + -13.971871376037598, + -10.134190559387207, + -13.07124137878418, + -13.591009140014648, + -10000.0, + -12.195642471313477, + -11.237064361572266, + -14.172703742980957, + -12.285969734191895, + -12.486908912658691, + -13.344427108764648, + -14.147533416748047, + -8.739598274230957, + -10.91167163848877, + -9.65263843536377, + -12.717270851135254, + -9.05600643157959, + -9.518941879272461, + -13.623188972473145, + -13.740755081176758, + -12.843908309936523, + -12.340595245361328, + -11.486748695373535, + -12.710258483886719, + -8.179301261901855, + -9.062299728393555, + -6.97949743270874, + -11.858112335205078, + -11.286624908447266, + -11.264288902282715, + -11.513409614562988, + -12.92937183380127, + -13.888775825500488, + -11.602699279785156, + -11.45335578918457, + -8.737150192260742, + -13.517394065856934, + -5.306643009185791, + -10.628350257873535, + -11.167513847351074, + -8.796040534973145, + -10000.0, + -7.871254920959473, + -12.365166664123535, + -10.174576759338379, + -11.040783882141113, + -10.433000564575195, + -13.301560401916504, + -10.412057876586914, + -12.430901527404785, + -10.438301086425781, + -12.994340896606445, + -9.674080848693848, + -6.31805419921875, + -10000.0, + -13.583410263061523, + -12.609077453613281, + -12.849180221557617, + -13.826027870178223, + -12.031959533691406, + -11.845420837402344, + -13.824118614196777, + -14.176135063171387, + -11.988388061523438, + -11.812614440917969, + -11.836989402770996, + -11.457304000854492, + -7.770702362060547, + -7.727717399597168, + -11.643401145935059, + -11.470467567443848, + -7.818939208984375, + 
-11.843179702758789, + -13.107500076293945, + -3.9299705028533936, + -8.545232772827148, + -10.604230880737305, + -11.725444793701172, + -14.248205184936523, + -12.327702522277832, + -10.582293510437012, + -13.244439125061035, + -12.059713363647461, + -12.921284675598145, + -12.883295059204102, + -8.039029121398926, + -12.784309387207031, + -9.565606117248535, + -13.330506324768066, + -13.591753005981445, + -11.633308410644531, + -8.91528034210205, + -13.258749961853027, + -14.396084785461426, + -12.985039710998535, + -7.417489528656006, + -13.149593353271484, + -12.515260696411133, + -12.587913513183594, + -8.725703239440918, + -12.955209732055664, + -13.303566932678223, + -12.77481460571289, + -10.94013786315918, + -12.95765495300293, + -12.781828880310059, + -7.8542914390563965, + -10.790460586547852, + -11.324527740478516, + -10000.0, + -14.37807559967041, + -11.92667007446289, + -13.613455772399902, + -8.873832702636719, + -10.859443664550781, + -10000.0, + -13.676170349121094, + -10000.0, + -10.775018692016602, + -12.95517349243164, + -13.232227325439453, + -14.387094497680664, + -12.78573989868164, + -11.10414981842041, + -12.770772933959961, + -8.37712574005127, + -13.284975051879883, + -12.99605655670166, + -12.795123100280762, + -12.59602165222168, + -14.046062469482422, + -12.717855453491211, + -11.992505073547363, + -11.261098861694336, + -11.927038192749023, + -7.993113040924072, + -12.678472518920898, + -8.111339569091797, + -8.321968078613281, + -9.167539596557617, + -6.938859939575195, + -10.086030006408691, + -11.12663459777832, + -10.549439430236816, + -10.726285934448242, + -9.660466194152832, + -8.06203842163086, + -9.214642524719238, + -13.663848876953125, + -11.201567649841309, + -11.692606925964355, + -10000.0, + -11.061088562011719, + -11.801623344421387, + -12.245379447937012, + -12.138701438903809, + -12.882545471191406, + -12.515228271484375, + -13.013315200805664, + -10000.0, + -12.440690994262695, + -11.182541847229004, + 
-12.293285369873047, + -13.289390563964844, + -8.25012493133545, + -9.714176177978516, + -8.54870319366455, + -10.123932838439941, + -10.123574256896973, + -10.787864685058594, + -10.535843849182129, + -12.084434509277344, + -11.367830276489258, + -9.272168159484863, + -12.730428695678711, + -10.77696418762207, + -11.299437522888184, + -9.2794771194458, + -10.121893882751465, + -12.715132713317871, + -9.817319869995117, + -11.207874298095703, + -9.38442325592041, + -12.960518836975098, + -9.32898998260498, + -11.51174259185791, + -10.884286880493164, + -11.72599983215332, + -13.018866539001465, + -13.688644409179688, + -11.104406356811523, + -9.628833770751953, + -10000.0, + -14.26093864440918, + -11.645624160766602, + -13.037396430969238, + -13.70888900756836, + -14.17890739440918, + -12.572925567626953, + -11.417001724243164, + -11.92243480682373, + -10000.0, + -10000.0, + -9.334487915039062, + -11.304604530334473, + -8.963071823120117, + -8.566967010498047, + -8.42164134979248, + -7.929381370544434, + -12.258378028869629, + -8.675025939941406, + -7.946563243865967, + -12.063398361206055, + -11.749732971191406, + -10000.0, + -10000.0, + -11.486349105834961, + -8.294340133666992, + -8.934319496154785, + -9.14141845703125, + -8.774731636047363, + -9.606856346130371, + -7.985258102416992, + -8.77881145477295, + -9.519185066223145, + -11.011818885803223, + -10000.0, + -12.663806915283203, + -9.498204231262207, + -11.668294906616211, + -11.114726066589355, + -10.064451217651367, + -12.4163236618042, + -9.18289566040039, + -14.760283470153809, + -12.412688255310059, + -12.485615730285645, + -13.954992294311523, + -10.590344429016113, + -11.5427827835083, + -10.981088638305664, + -7.230983257293701, + -12.179085731506348, + -11.862399101257324, + -8.748784065246582, + -12.161140441894531, + -6.327768802642822, + -10.773768424987793, + -11.07607364654541, + -12.150201797485352, + -11.026751518249512, + -10.101223945617676, + -12.312088966369629, + -13.726814270019531, + 
-10.998577117919922, + -11.658036231994629, + -12.724591255187988, + -8.90641975402832, + -10.587677001953125, + -13.232854843139648, + -12.673201560974121, + -12.671024322509766, + -9.579854965209961, + -12.255494117736816, + -13.195446968078613, + -13.792229652404785, + -12.136938095092773, + -14.033784866333008, + -11.097009658813477, + -12.227850914001465, + -8.381165504455566, + -11.749275207519531, + -10.788141250610352, + -9.176112174987793, + -8.276718139648438, + -12.19137954711914, + -12.501619338989258, + -10000.0, + -12.824344635009766, + -8.281817436218262, + -11.095662117004395, + -14.165250778198242, + -11.348094940185547, + -12.567456245422363, + -8.52457332611084, + -12.733758926391602, + -14.122416496276855, + -12.041360855102539, + -14.223989486694336, + -10.91444206237793, + -11.301746368408203, + -13.864544868469238, + -9.358236312866211, + -12.629053115844727, + -13.954301834106445, + -12.757906913757324, + -11.3294677734375, + -12.579774856567383, + -9.734641075134277, + -12.667106628417969, + -7.761864185333252, + -13.73751163482666, + -8.641522407531738, + -9.142242431640625, + -11.219501495361328, + -8.902569770812988, + -10.32934284210205, + -9.886985778808594, + -12.262642860412598, + -10.265408515930176, + -10.638322830200195, + -13.340587615966797, + -13.356618881225586, + -11.643696784973145, + -14.654583930969238, + -11.425148963928223, + -9.355377197265625, + -13.895018577575684, + -7.402246475219727, + -14.136250495910645, + -6.3201680183410645, + -13.139766693115234, + -12.429423332214355, + -6.986958980560303, + -12.735187530517578, + -9.379314422607422, + -13.72009563446045, + -14.089472770690918, + -13.748627662658691, + -11.978914260864258, + -13.91929817199707, + -13.818605422973633, + -12.800165176391602, + -12.482606887817383, + -7.0481696128845215, + -13.999007225036621, + -12.334272384643555, + -9.02311897277832, + -12.519035339355469, + -12.279437065124512, + -13.015851020812988, + -11.549497604370117, + 
-10.443224906921387, + -11.093907356262207, + -11.322037696838379, + -12.739632606506348, + -11.954368591308594, + -11.006121635437012, + -6.716159820556641, + -13.072667121887207, + -9.844562530517578, + -7.880402088165283, + -12.887127876281738, + -9.566628456115723, + -12.454107284545898, + -13.231634140014648, + -10.958243370056152, + -7.653661727905273, + -10.862187385559082, + -12.770365715026855, + -13.171697616577148, + -12.640562057495117, + -12.947507858276367, + -12.908676147460938, + -13.518932342529297, + -12.32068157196045, + -13.127158164978027, + -11.459029197692871, + -8.377157211303711, + -10000.0, + -10.775200843811035, + -10.465039253234863, + -12.271913528442383, + -14.012896537780762, + -11.834538459777832, + -11.865020751953125, + -13.912064552307129, + -13.423148155212402, + -12.969127655029297, + -13.150151252746582, + -13.349272727966309, + -13.358369827270508, + -13.805335998535156, + -10000.0, + -9.97810173034668, + -9.83073902130127, + -10.826997756958008, + -10.815749168395996, + -13.47970962524414, + -13.573468208312988, + -7.573562145233154, + -13.315520286560059, + -9.457472801208496, + -10.05649185180664, + -8.165127754211426, + -12.03433609008789, + -11.478641510009766, + -13.337766647338867, + -12.477649688720703, + -12.112797737121582, + -13.071025848388672, + -13.196009635925293, + -13.268590927124023, + -12.347124099731445, + -13.836188316345215, + -12.877021789550781, + -11.046356201171875, + -9.727120399475098, + -11.82292366027832, + -12.20386028289795, + -11.933242797851562, + -13.743108749389648, + -10000.0, + -13.26471996307373, + -7.864959716796875, + -12.188377380371094, + -13.456745147705078, + -13.93160629272461, + -10000.0, + -12.802282333374023, + -9.642523765563965, + -13.602763175964355, + -13.777410507202148, + -14.326563835144043, + -12.257325172424316, + -8.880353927612305, + -11.984357833862305, + -10.867341995239258, + -13.713247299194336, + -10.132527351379395, + -13.895491600036621, + -14.169499397277832, 
+ -11.725127220153809, + -13.49624252319336, + -13.843846321105957, + -13.832304000854492, + -13.618553161621094, + -11.058784484863281, + -10.402983665466309, + -10.856165885925293 + ] +} + diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistry.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistry.java index 066a975d7de7d..86cefd71b2d5f 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistry.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistry.java @@ -49,7 +49,8 @@ public class ProfilingIndexTemplateRegistry extends IndexTemplateRegistry { // version 6: Added 'host.arch' keyword mapping to profiling-hosts // version 7: Added 'host.type', 'cloud.provider', 'cloud.region' keyword mappings to profiling-hosts // version 8: Changed from disabled _source to synthetic _source for profiling-events-* and profiling-metrics - public static final int INDEX_TEMPLATE_VERSION = 8; + // version 9: Changed sort order for profiling-events-* + public static final int INDEX_TEMPLATE_VERSION = 9; // history for individual indices / index templates. 
Only bump these for breaking changes that require to create a new index public static final int PROFILING_EVENTS_VERSION = 3; diff --git a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java index 7243eae34ac6b..45d3653a28b6a 100644 --- a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java +++ b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java @@ -31,7 +31,7 @@ public static class CsvSpecParser implements SpecReader.Parser { private final StringBuilder earlySchema = new StringBuilder(); private final StringBuilder query = new StringBuilder(); private final StringBuilder data = new StringBuilder(); - private final List requiredFeatures = new ArrayList<>(); + private final List requiredCapabilities = new ArrayList<>(); private CsvTestCase testCase; private CsvSpecParser() {} @@ -44,7 +44,7 @@ public Object parse(String line) { assertThat("Early schema already declared " + earlySchema, earlySchema.length(), is(0)); earlySchema.append(line.substring(SCHEMA_PREFIX.length()).trim()); } else if (line.toLowerCase(Locale.ROOT).startsWith("required_feature:")) { - requiredFeatures.add(line.substring("required_feature:".length()).trim()); + requiredCapabilities.add(line.substring("required_feature:".length()).trim().replace("esql.", "")); } else { if (line.endsWith(";")) { // pick up the query @@ -52,8 +52,8 @@ public Object parse(String line) { query.append(line.substring(0, line.length() - 1).trim()); testCase.query = query.toString(); testCase.earlySchema = earlySchema.toString(); - testCase.requiredFeatures = List.copyOf(requiredFeatures); - requiredFeatures.clear(); + testCase.requiredCapabilities = List.copyOf(requiredCapabilities); + requiredCapabilities.clear(); earlySchema.setLength(0); query.setLength(0); } @@ -111,7 +111,7 @@ public static class 
CsvTestCase { private final List expectedWarningsRegexString = new ArrayList<>(); private final List expectedWarningsRegex = new ArrayList<>(); public boolean ignoreOrder; - public List requiredFeatures = List.of(); + public List requiredCapabilities = List.of(); // The emulated-specific warnings must always trail the non-emulated ones, if these are present. Otherwise, the closing bracket // would need to be changed to a less common sequence (like `]#` maybe). diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupUsageTransportAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupUsageTransportAction.java index a15dc19bb4abf..c3b568fc32b71 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupUsageTransportAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupUsageTransportAction.java @@ -12,6 +12,8 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.core.Predicates; +import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.protocol.xpack.XPackUsageRequest; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -20,6 +22,7 @@ import org.elasticsearch.xpack.core.action.XPackUsageFeatureResponse; import org.elasticsearch.xpack.core.action.XPackUsageFeatureTransportAction; import org.elasticsearch.xpack.core.rollup.RollupFeatureSetUsage; +import org.elasticsearch.xpack.core.rollup.job.RollupJob; public class RollupUsageTransportAction extends XPackUsageFeatureTransportAction { @@ -48,8 +51,12 @@ protected void masterOperation( ClusterState state, ActionListener listener ) { - // TODO expose the currently running rollup tasks on this node? 
Unclear the best way to do that - RollupFeatureSetUsage usage = new RollupFeatureSetUsage(); + int numberOfRollupJobs = 0; + PersistentTasksCustomMetadata persistentTasks = state.metadata().custom(PersistentTasksCustomMetadata.TYPE); + if (persistentTasks != null) { + numberOfRollupJobs = persistentTasks.findTasks(RollupJob.NAME, Predicates.always()).size(); + } + RollupFeatureSetUsage usage = new RollupFeatureSetUsage(numberOfRollupJobs); listener.onResponse(new XPackUsageFeatureResponse(usage)); } } diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupInfoTransportActionTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupInfoTransportActionTests.java index b0881eb350d5a..243b478db6dbf 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupInfoTransportActionTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupInfoTransportActionTests.java @@ -8,18 +8,19 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockUtils; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.XPackFeatureSet; import org.elasticsearch.xpack.core.action.XPackUsageFeatureResponse; import org.elasticsearch.xpack.core.rollup.RollupFeatureSetUsage; import java.io.IOException; import java.util.concurrent.ExecutionException; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.core.Is.is; import static org.mockito.Mockito.mock; @@ -42,13 +43,15 @@ public void testUsage() throws ExecutionException, InterruptedException, IOExcep TransportService transportService = 
MockUtils.setupTransportServiceWithThreadpoolExecutor(threadPool); var usageAction = new RollupUsageTransportAction(transportService, null, threadPool, mock(ActionFilters.class), null); PlainActionFuture future = new PlainActionFuture<>(); - usageAction.masterOperation(null, null, null, future); - XPackFeatureSet.Usage rollupUsage = future.get().getUsage(); + usageAction.masterOperation(null, null, ClusterState.EMPTY_STATE, future); + RollupFeatureSetUsage rollupUsage = (RollupFeatureSetUsage) future.get().getUsage(); BytesStreamOutput out = new BytesStreamOutput(); rollupUsage.writeTo(out); - XPackFeatureSet.Usage serializedUsage = new RollupFeatureSetUsage(out.bytes().streamInput()); + var serializedUsage = new RollupFeatureSetUsage(out.bytes().streamInput()); assertThat(rollupUsage.name(), is(serializedUsage.name())); assertThat(rollupUsage.enabled(), is(serializedUsage.enabled())); + assertThat(rollupUsage.enabled(), is(serializedUsage.enabled())); + assertThat(rollupUsage.getNumberOfRollupJobs(), equalTo(serializedUsage.getNumberOfRollupJobs())); } } diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityApiKeyRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityApiKeyRestIT.java index 2f3ece56b3281..3154a5ac0cd7d 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityApiKeyRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityApiKeyRestIT.java @@ -108,6 +108,7 @@ public void testCrossClusterSearchWithApiKey() throws Exception { final var putRoleRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE); putRoleRequest.setJsonEntity(""" { + "description": "role with privileges for remote and local indices", "cluster": 
["manage_own_api_key"], "indices": [ { diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java index ccf9d66a5bc21..cbf735c66462c 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java @@ -99,6 +99,7 @@ public void testBwcWithLegacyCrossClusterSearch() throws Exception { final var putRoleRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE); putRoleRequest.setJsonEntity(""" { + "description": "This description should not be sent to remote clusters.", "cluster": ["manage_own_api_key"], "indices": [ { diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java index c6bb6e10f0537..6eb49ec1ab8ae 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java @@ -187,6 +187,7 @@ public void testCrossClusterSearch() throws Exception { final var putRoleRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE); putRoleRequest.setJsonEntity(""" { + "description": "Role with privileges for remote and local indices.", "indices": [ { "names": ["local_index"], @@ -293,6 +294,7 @@ public void testCrossClusterSearch() throws Exception { final var 
putLocalSearchRoleRequest = new Request("PUT", "/_security/role/local_search"); putLocalSearchRoleRequest.setJsonEntity(Strings.format(""" { + "description": "Role with privileges for searching local only indices.", "indices": [ { "names": ["local_index"], diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 65651b4a7eb65..2fc894c69aa4c 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -341,6 +341,7 @@ public class Constants { "cluster:monitor/update/health/info", "cluster:monitor/ingest/geoip/stats", "cluster:monitor/main", + "cluster:monitor/nodes/capabilities", "cluster:monitor/nodes/data_tier_usage", "cluster:monitor/nodes/hot_threads", "cluster:monitor/nodes/info", diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityOnTrialLicenseRestTestCase.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityOnTrialLicenseRestTestCase.java index 3ad250c4e6037..bdbd5c659c479 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityOnTrialLicenseRestTestCase.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityOnTrialLicenseRestTestCase.java @@ -87,8 +87,16 @@ protected void createRole(String name, Collection clusterPrivileges) thr final RoleDescriptor role = new RoleDescriptor( name, clusterPrivileges.toArray(String[]::new), - new RoleDescriptor.IndicesPrivileges[0], - new 
String[0] + null, + null, + null, + null, + null, + null, + null, + null, + null, + null ); getSecurityClient().putRole(role); } diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java index fc522b0213eeb..1b0d3397daa90 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java @@ -97,7 +97,7 @@ public void createUsers() throws IOException { createUser(MANAGE_API_KEY_USER, END_USER_PASSWORD, List.of("manage_api_key_role")); createRole("manage_api_key_role", Set.of("manage_api_key")); createUser(MANAGE_SECURITY_USER, END_USER_PASSWORD, List.of("manage_security_role")); - createRole("manage_security_role", Set.of("manage_security")); + createRoleWithDescription("manage_security_role", Set.of("manage_security"), "Allows all security-related operations!"); } @After @@ -1681,6 +1681,134 @@ public void testCrossClusterApiKeyAccessInResponseCanBeUsedAsInputForUpdate() th assertThat(updateResponse4.evaluate("updated"), is(false)); } + public void testUserRoleDescriptionsGetsRemoved() throws IOException { + // Creating API key whose owner's role (limited-by) has description should succeed, + // and limited-by role descriptor should be filtered to remove description. 
+ { + final Request createRestApiKeyRequest = new Request("POST", "_security/api_key"); + setUserForRequest(createRestApiKeyRequest, MANAGE_SECURITY_USER, END_USER_PASSWORD); + createRestApiKeyRequest.setJsonEntity(""" + { + "name": "my-api-key" + } + """); + final ObjectPath createRestApiKeyResponse = assertOKAndCreateObjectPath(client().performRequest(createRestApiKeyRequest)); + String apiKeyId = createRestApiKeyResponse.evaluate("id"); + + ObjectPath fetchResponse = assertOKAndCreateObjectPath(fetchApiKeyWithUser(MANAGE_SECURITY_USER, apiKeyId, true)); + assertThat(fetchResponse.evaluate("api_keys.0.id"), equalTo(apiKeyId)); + assertThat(fetchResponse.evaluate("api_keys.0.role_descriptors"), equalTo(Map.of())); + assertThat(fetchResponse.evaluate("api_keys.0.limited_by.0.manage_security_role.description"), is(nullValue())); + + // Updating should behave the same as create. No limited-by role description should be persisted. + final Request updateRequest = new Request("PUT", "_security/api_key/" + apiKeyId); + setUserForRequest(updateRequest, MANAGE_SECURITY_USER, END_USER_PASSWORD); + updateRequest.setJsonEntity(""" + { + "role_descriptors":{ + "my-role": { + "cluster": ["all"] + } + } + } + """); + assertThat(responseAsMap(client().performRequest(updateRequest)).get("updated"), equalTo(true)); + fetchResponse = assertOKAndCreateObjectPath(fetchApiKeyWithUser(MANAGE_SECURITY_USER, apiKeyId, true)); + assertThat(fetchResponse.evaluate("api_keys.0.id"), equalTo(apiKeyId)); + assertThat(fetchResponse.evaluate("api_keys.0.limited_by.0.manage_security_role.description"), is(nullValue())); + assertThat(fetchResponse.evaluate("api_keys.0.role_descriptors.my-role.cluster"), equalTo(List.of("all"))); + } + { + final Request grantApiKeyRequest = new Request("POST", "_security/api_key/grant"); + grantApiKeyRequest.setJsonEntity(Strings.format(""" + { + "grant_type":"password", + "username":"%s", + "password":"%s", + "api_key":{ + "name":"my-granted-api-key", + 
"role_descriptors":{ + "my-role":{ + "cluster":["all"] + } + } + } + }""", MANAGE_SECURITY_USER, END_USER_PASSWORD)); + String grantedApiKeyId = assertOKAndCreateObjectPath(adminClient().performRequest(grantApiKeyRequest)).evaluate("id"); + var fetchResponse = assertOKAndCreateObjectPath(fetchApiKeyWithUser(MANAGE_SECURITY_USER, grantedApiKeyId, true)); + assertThat(fetchResponse.evaluate("api_keys.0.id"), equalTo(grantedApiKeyId)); + assertThat(fetchResponse.evaluate("api_keys.0.name"), equalTo("my-granted-api-key")); + assertThat(fetchResponse.evaluate("api_keys.0.limited_by.0.manage_security_role.description"), is(nullValue())); + assertThat(fetchResponse.evaluate("api_keys.0.role_descriptors.my-role.cluster"), equalTo(List.of("all"))); + } + } + + public void testCreatingApiKeyWithRoleDescriptionFails() throws IOException { + final Request createRequest = new Request("POST", "_security/api_key"); + setUserForRequest(createRequest, MANAGE_SECURITY_USER, END_USER_PASSWORD); + createRequest.setJsonEntity(""" + { + "name": "my-api-key" + } + """); + final ObjectPath createResponse = assertOKAndCreateObjectPath(client().performRequest(createRequest)); + String apiKeyId = createResponse.evaluate("id"); + + final Request updateRequest = new Request("PUT", "_security/api_key/" + apiKeyId); + setUserForRequest(updateRequest, MANAGE_SECURITY_USER, END_USER_PASSWORD); + updateRequest.setJsonEntity(""" + { + "role_descriptors":{ + "my-role": { + "description": "This description should not be allowed!" + } + } + } + """); + + var e = expectThrows(ResponseException.class, () -> client().performRequest(updateRequest)); + assertThat(e.getMessage(), containsString("failed to parse role [my-role]. 
unexpected field [description]")); + } + + public void testUpdatingApiKeyWithRoleDescriptionFails() throws IOException { + final Request createRestApiKeyRequest = new Request("POST", "_security/api_key"); + setUserForRequest(createRestApiKeyRequest, MANAGE_SECURITY_USER, END_USER_PASSWORD); + createRestApiKeyRequest.setJsonEntity(""" + { + "name": "my-api-key", + "role_descriptors":{ + "my-role": { + "description": "This description should not be allowed!" + } + } + } + """); + + var e = expectThrows(ResponseException.class, () -> client().performRequest(createRestApiKeyRequest)); + assertThat(e.getMessage(), containsString("failed to parse role [my-role]. unexpected field [description]")); + } + + public void testGrantApiKeyWithRoleDescriptionFails() throws Exception { + final Request grantApiKeyRequest = new Request("POST", "_security/api_key/grant"); + setUserForRequest(grantApiKeyRequest, MANAGE_SECURITY_USER, END_USER_PASSWORD); + grantApiKeyRequest.setJsonEntity(Strings.format(""" + { + "grant_type":"password", + "username":"%s", + "password":"%s", + "api_key":{ + "name":"my-granted-api-key", + "role_descriptors":{ + "my-role":{ + "description": "This role does not grant any permissions!" + } + } + } + }""", MANAGE_SECURITY_USER, END_USER_PASSWORD.toString())); + var e = expectThrows(ResponseException.class, () -> client().performRequest(grantApiKeyRequest)); + assertThat(e.getMessage(), containsString("failed to parse role [my-role]. 
unexpected field [description]")); + } + public void testWorkflowsRestrictionSupportForApiKeys() throws IOException { final Request createApiKeyRequest = new Request("POST", "_security/api_key"); createApiKeyRequest.setJsonEntity(""" @@ -1916,6 +2044,22 @@ private Response fetchApiKey(String apiKeyId) throws IOException { return getApiKeyResponse; } + private Response fetchApiKeyWithUser(String username, String apiKeyId, boolean withLimitedBy) throws IOException { + final Request fetchRequest; + if (randomBoolean()) { + fetchRequest = new Request("GET", "/_security/api_key"); + fetchRequest.addParameter("id", apiKeyId); + fetchRequest.addParameter("with_limited_by", String.valueOf(withLimitedBy)); + } else { + fetchRequest = new Request("GET", "/_security/_query/api_key"); + fetchRequest.addParameter("with_limited_by", String.valueOf(withLimitedBy)); + fetchRequest.setJsonEntity(Strings.format(""" + { "query": { "ids": { "values": ["%s"] } } }""", apiKeyId)); + } + setUserForRequest(fetchRequest, username, END_USER_PASSWORD); + return client().performRequest(fetchRequest); + } + private void assertBadCreateCrossClusterApiKeyRequest(String body, String expectedErrorMessage) throws IOException { final Request createRequest = new Request("POST", "/_security/cross_cluster/api_key"); createRequest.setJsonEntity(body); @@ -2178,8 +2322,27 @@ private void createRole(String name, Collection localClusterPrivileges, remoteIndicesClusterAliases ) ), + null, null ); getSecurityClient().putRole(role); } + + protected void createRoleWithDescription(String name, Collection clusterPrivileges, String description) throws IOException { + final RoleDescriptor role = new RoleDescriptor( + name, + clusterPrivileges.toArray(String[]::new), + null, + null, + null, + null, + null, + null, + null, + null, + null, + description + ); + getSecurityClient().putRole(role); + } } diff --git 
a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/crossclusteraccess/CrossClusterAccessHeadersForCcsRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/crossclusteraccess/CrossClusterAccessHeadersForCcsRestIT.java index 9402d627063c4..500b796e62660 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/crossclusteraccess/CrossClusterAccessHeadersForCcsRestIT.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/crossclusteraccess/CrossClusterAccessHeadersForCcsRestIT.java @@ -102,6 +102,7 @@ public void setup() throws IOException { final var putRoleRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE); putRoleRequest.setJsonEntity(""" { + "description": "Grants permission for searching local and remote clusters.", "cluster": ["manage_api_key"], "indices": [ { @@ -204,7 +205,8 @@ public void testCrossClusterAccessHeadersSentSingleRemote() throws Exception { null, null, null, - null + null, + null // description is never sent across clusters ) ) ); @@ -273,6 +275,7 @@ public void testCrossClusterAccessHeadersSentMultipleRemotes() throws Exception null, null, null, + null, null ) ) @@ -305,6 +308,7 @@ public void testCrossClusterAccessHeadersSentMultipleRemotes() throws Exception null, null, null, + null, null ) ) @@ -418,6 +422,7 @@ public void testApiKeyCrossClusterAccessHeadersSentMultipleRemotes() throws Exce null, null, null, + null, null ) ), @@ -438,6 +443,7 @@ public void testApiKeyCrossClusterAccessHeadersSentMultipleRemotes() throws Exce null, null, null, + null, null ) ) @@ -466,6 +472,7 @@ public void testApiKeyCrossClusterAccessHeadersSentMultipleRemotes() throws Exce null, null, null, + null, null ) ), @@ -489,6 +496,7 @@ public void testApiKeyCrossClusterAccessHeadersSentMultipleRemotes() throws Exce null, null, null, + null, 
null ) ) @@ -581,6 +589,7 @@ public void testApiKeyCrossClusterAccessHeadersSentSingleRemote() throws Excepti null, null, null, + null, null ) ), @@ -601,6 +610,7 @@ public void testApiKeyCrossClusterAccessHeadersSentSingleRemote() throws Excepti null, null, null, + null, null ) ) @@ -625,6 +635,7 @@ public void testApiKeyCrossClusterAccessHeadersSentSingleRemote() throws Excepti null, null, null, + null, null ) ) @@ -713,6 +724,7 @@ public void testApiKeyCrossClusterAccessHeadersSentSingleRemote() throws Excepti null, null, null, + null, null ) ), @@ -733,6 +745,7 @@ public void testApiKeyCrossClusterAccessHeadersSentSingleRemote() throws Excepti null, null, null, + null, null ) ) @@ -757,6 +770,7 @@ public void testApiKeyCrossClusterAccessHeadersSentSingleRemote() throws Excepti null, null, null, + null, null ) ) diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithDescriptionRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithDescriptionRestIT.java new file mode 100644 index 0000000000000..95a650737d452 --- /dev/null +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithDescriptionRestIT.java @@ -0,0 +1,146 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security.role; + +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpPut; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.core.Strings; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; +import org.elasticsearch.xpack.core.security.support.Validation; +import org.elasticsearch.xpack.security.SecurityOnTrialLicenseRestTestCase; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +public class RoleWithDescriptionRestIT extends SecurityOnTrialLicenseRestTestCase { + + public void testCreateOrUpdateRoleWithDescription() throws Exception { + final String roleName = "role_with_description"; + final String initialRoleDescription = randomAlphaOfLengthBetween(0, 10); + { + Request createRoleRequest = new Request(HttpPut.METHOD_NAME, "/_security/role/" + roleName); + createRoleRequest.setJsonEntity(Strings.format(""" + { + "description": "%s", + "cluster": ["all"], + "indices": [{"names": ["*"], "privileges": ["all"]}] + }""", initialRoleDescription)); + Response createResponse = adminClient().performRequest(createRoleRequest); + assertOK(createResponse); + fetchRoleAndAssertEqualsExpected( + roleName, + new RoleDescriptor( + roleName, + new String[] { "all" }, + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("*").privileges("all").build() }, + null, + null, + null, + null, + null, + null, + null, + null, + initialRoleDescription + ) + ); + } + { + final String newRoleDescription = randomValueOtherThan(initialRoleDescription, () -> randomAlphaOfLengthBetween(0, 10)); + Request updateRoleRequest = new Request(HttpPost.METHOD_NAME, "/_security/role/" + roleName); + 
updateRoleRequest.setJsonEntity(Strings.format(""" + { + "description": "%s", + "cluster": ["all"], + "indices": [{"names": ["index-*"], "privileges": ["all"]}] + }""", newRoleDescription)); + Response updateResponse = adminClient().performRequest(updateRoleRequest); + assertOK(updateResponse); + + fetchRoleAndAssertEqualsExpected( + roleName, + new RoleDescriptor( + roleName, + new String[] { "all" }, + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("index-*").privileges("all").build() }, + null, + null, + null, + null, + null, + null, + null, + null, + newRoleDescription + ) + ); + } + } + + public void testCreateRoleWithInvalidDescriptionFails() { + Request createRoleRequest = new Request(HttpPut.METHOD_NAME, "/_security/role/role_with_large_description"); + createRoleRequest.setJsonEntity(Strings.format(""" + { + "description": "%s", + "cluster": ["all"], + "indices": [{"names": ["*"], "privileges": ["all"]}] + }""", randomAlphaOfLength(Validation.Roles.MAX_DESCRIPTION_LENGTH + randomIntBetween(1, 5)))); + + ResponseException e = expectThrows(ResponseException.class, () -> adminClient().performRequest(createRoleRequest)); + assertEquals(400, e.getResponse().getStatusLine().getStatusCode()); + assertThat( + e.getMessage(), + containsString("Role description must be less than " + Validation.Roles.MAX_DESCRIPTION_LENGTH + " characters.") + ); + } + + public void testUpdateRoleWithInvalidDescriptionFails() throws IOException { + Request createRoleRequest = new Request(HttpPut.METHOD_NAME, "/_security/role/my_role"); + createRoleRequest.setJsonEntity(""" + { + "cluster": ["all"], + "indices": [{"names": ["*"], "privileges": ["all"]}] + }"""); + Response createRoleResponse = adminClient().performRequest(createRoleRequest); + assertOK(createRoleResponse); + + Request updateRoleRequest = new Request(HttpPost.METHOD_NAME, "/_security/role/my_role"); + updateRoleRequest.setJsonEntity(Strings.format(""" + { + "description": 
"%s", + "cluster": ["all"], + "indices": [{"names": ["index-*"], "privileges": ["all"]}] + }""", randomAlphaOfLength(Validation.Roles.MAX_DESCRIPTION_LENGTH + randomIntBetween(1, 5)))); + + ResponseException e = expectThrows(ResponseException.class, () -> adminClient().performRequest(updateRoleRequest)); + assertEquals(400, e.getResponse().getStatusLine().getStatusCode()); + assertThat( + e.getMessage(), + containsString("Role description must be less than " + Validation.Roles.MAX_DESCRIPTION_LENGTH + " characters.") + ); + } + + private void fetchRoleAndAssertEqualsExpected(final String roleName, final RoleDescriptor expectedRoleDescriptor) throws IOException { + final Response getRoleResponse = adminClient().performRequest(new Request("GET", "/_security/role/" + roleName)); + assertOK(getRoleResponse); + final Map actual = responseAsParser(getRoleResponse).map( + HashMap::new, + p -> RoleDescriptor.parserBuilder().allowDescription(true).build().parse(expectedRoleDescriptor.getName(), p) + ); + assertThat(actual, equalTo(Map.of(expectedRoleDescriptor.getName(), expectedRoleDescriptor))); + } +} diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithRemoteIndicesPrivilegesRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithRemoteIndicesPrivilegesRestIT.java index 28da12b226a66..aa5967ea7277a 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithRemoteIndicesPrivilegesRestIT.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithRemoteIndicesPrivilegesRestIT.java @@ -89,6 +89,7 @@ public void testRemoteIndexPrivileges() throws IOException { .grantedFields("field") .build() }, null, + null, null ) ); @@ -163,6 +164,7 @@ public void testRemoteIndexPrivileges() throws IOException { 
.grantedFields("field") .build() }, null, + null, null ) ); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java index 286a9cb736b1b..7c753692628cb 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java @@ -7,13 +7,11 @@ package org.elasticsearch.integration; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; -import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; -import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.admin.indices.close.CloseIndexResponse; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.metadata.ReservedStateErrorMetadata; @@ -27,15 +25,10 @@ import org.elasticsearch.reservedstate.service.FileSettingsService; import org.elasticsearch.test.NativeRealmIntegTestCase; import org.elasticsearch.xcontent.XContentParserConfiguration; -import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingAction; -import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsAction; import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsRequest; import 
org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingAction; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; -import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequestBuilder; -import org.elasticsearch.xpack.core.security.authc.RealmConfig; -import org.elasticsearch.xpack.core.security.authc.support.UserRoleMapper; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; import org.elasticsearch.xpack.security.action.rolemapping.ReservedRoleMappingAction; import org.junit.After; @@ -46,31 +39,25 @@ import java.nio.file.Path; import java.nio.file.StandardCopyOption; import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; -import java.util.function.Consumer; +import java.util.stream.Collectors; import static org.elasticsearch.indices.recovery.RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING; import static org.elasticsearch.xcontent.XContentType.JSON; import static org.elasticsearch.xpack.core.security.test.TestRestrictedIndices.INTERNAL_SECURITY_MAIN_INDEX_7; import static org.hamcrest.Matchers.allOf; -import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.emptyArray; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.notNullValue; -import static org.mockito.Mockito.mock; /** - * Tests that file settings service can properly add role mappings. + * Tests that file settings service can properly add role mappings and detect REST clashes + * with the reserved role mappings. 
*/ public class RoleMappingFileSettingsIT extends NativeRealmIntegTestCase { @@ -148,21 +135,12 @@ public class RoleMappingFileSettingsIT extends NativeRealmIntegTestCase { } }"""; - @Override - protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { - Settings.Builder builder = Settings.builder() - .put(super.nodeSettings(nodeOrdinal, otherSettings)) - // some tests make use of cluster-state based role mappings - .put("xpack.security.authc.cluster_state_role_mappings.enabled", true); - return builder.build(); - } - @After public void cleanUp() { updateClusterSettings(Settings.builder().putNull("indices.recovery.max_bytes_per_sec")); } - public static void writeJSONFile(String node, String json, Logger logger, AtomicLong versionCounter) throws Exception { + private void writeJSONFile(String node, String json) throws Exception { long version = versionCounter.incrementAndGet(); FileSettingsService fileSettingsService = internalCluster().getInstance(FileSettingsService.class, node); @@ -173,11 +151,10 @@ public static void writeJSONFile(String node, String json, Logger logger, Atomic Files.createDirectories(fileSettingsService.watchedFileDir()); Path tempFilePath = createTempFile(); - logger.info("--> before writing JSON config to node {} with path {}", node, tempFilePath); + logger.info("--> writing JSON config to node {} with path {}", node, tempFilePath); logger.info(Strings.format(json, version)); Files.write(tempFilePath, Strings.format(json, version).getBytes(StandardCharsets.UTF_8)); Files.move(tempFilePath, fileSettingsService.watchedFile(), StandardCopyOption.ATOMIC_MOVE); - logger.info("--> after writing JSON config to node {} with path {}", node, tempFilePath); } private Tuple setupClusterStateListener(String node, String expectedKey) { @@ -261,41 +238,49 @@ private void assertRoleMappingsSaveOK(CountDownLatch savedClusterState, AtomicLo expectThrows(ExecutionException.class, () -> clusterAdmin().updateSettings(req).get()).getMessage() ); 
- for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { - PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>(); - userRoleMapper.resolveRoles( - new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), - resolveRolesFuture - ); - assertThat(resolveRolesFuture.get(), containsInAnyOrder("kibana_user", "fleet_user")); - } - - // the role mappings are not retrievable by the role mapping action (which only accesses "native" i.e. index-based role mappings) var request = new GetRoleMappingsRequest(); request.setNames("everyone_kibana", "everyone_fleet"); var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); - assertFalse(response.hasMappings()); - assertThat(response.mappings(), emptyArray()); + assertTrue(response.hasMappings()); + assertThat( + Arrays.stream(response.mappings()).map(r -> r.getName()).collect(Collectors.toSet()), + allOf(notNullValue(), containsInAnyOrder("everyone_kibana", "everyone_fleet")) + ); - // role mappings (with the same names) can also be stored in the "native" store - var putRoleMappingResponse = client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_kibana")).actionGet(); - assertTrue(putRoleMappingResponse.isCreated()); - putRoleMappingResponse = client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_fleet")).actionGet(); - assertTrue(putRoleMappingResponse.isCreated()); + // Try using the REST API to update the everyone_kibana role mapping + // This should fail, we have reserved certain role mappings in operator mode + assertEquals( + "Failed to process request " + + "[org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest/unset] " + + "with errors: [[everyone_kibana] set as read-only by [file_settings]]", + expectThrows( + IllegalArgumentException.class, + () -> client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_kibana")).actionGet() 
+ ).getMessage() + ); + assertEquals( + "Failed to process request " + + "[org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest/unset] " + + "with errors: [[everyone_fleet] set as read-only by [file_settings]]", + expectThrows( + IllegalArgumentException.class, + () -> client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_fleet")).actionGet() + ).getMessage() + ); } public void testRoleMappingsApplied() throws Exception { ensureGreen(); var savedClusterState = setupClusterStateListener(internalCluster().getMasterName(), "everyone_kibana"); - writeJSONFile(internalCluster().getMasterName(), testJSON, logger, versionCounter); + writeJSONFile(internalCluster().getMasterName(), testJSON); assertRoleMappingsSaveOK(savedClusterState.v1(), savedClusterState.v2()); logger.info("---> cleanup cluster settings..."); savedClusterState = setupClusterStateListenerForCleanup(internalCluster().getMasterName()); - writeJSONFile(internalCluster().getMasterName(), emptyJSON, logger, versionCounter); + writeJSONFile(internalCluster().getMasterName(), emptyJSON); boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); assertTrue(awaitSuccessful); @@ -307,65 +292,32 @@ public void testRoleMappingsApplied() throws Exception { clusterStateResponse.getState().metadata().persistentSettings().get(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey()) ); - // native role mappings are not affected by the removal of the cluster-state based ones - { - var request = new GetRoleMappingsRequest(); - request.setNames("everyone_kibana", "everyone_fleet"); - var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); - assertTrue(response.hasMappings()); - assertThat( - Arrays.stream(response.mappings()).map(ExpressionRoleMapping::getName).toList(), - containsInAnyOrder("everyone_kibana", "everyone_fleet") - ); - } - - // and roles are resolved based on the native role mappings - for (UserRoleMapper userRoleMapper 
: internalCluster().getInstances(UserRoleMapper.class)) { - PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>(); - userRoleMapper.resolveRoles( - new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), - resolveRolesFuture - ); - assertThat(resolveRolesFuture.get(), contains("kibana_user_native")); - } - - { - var request = new DeleteRoleMappingRequest(); - request.setName("everyone_kibana"); - var response = client().execute(DeleteRoleMappingAction.INSTANCE, request).get(); - assertTrue(response.isFound()); - request = new DeleteRoleMappingRequest(); - request.setName("everyone_fleet"); - response = client().execute(DeleteRoleMappingAction.INSTANCE, request).get(); - assertTrue(response.isFound()); - } - - // no roles are resolved now, because both native and cluster-state based stores have been cleared - for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { - PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>(); - userRoleMapper.resolveRoles( - new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), - resolveRolesFuture - ); - assertThat(resolveRolesFuture.get(), empty()); - } + var request = new GetRoleMappingsRequest(); + request.setNames("everyone_kibana", "everyone_fleet"); + var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); + assertFalse(response.hasMappings()); } - public static Tuple setupClusterStateListenerForError( - ClusterService clusterService, - Consumer errorMetadataConsumer - ) { + private Tuple setupClusterStateListenerForError(String node) { + ClusterService clusterService = internalCluster().clusterService(node); CountDownLatch savedClusterState = new CountDownLatch(1); AtomicLong metadataVersion = new AtomicLong(-1); clusterService.addListener(new ClusterStateListener() { @Override public void clusterChanged(ClusterChangedEvent event) { ReservedStateMetadata 
reservedState = event.state().metadata().reservedStateMetadata().get(FileSettingsService.NAMESPACE); - if (reservedState != null && reservedState.errorMetadata() != null) { + if (reservedState != null + && reservedState.errorMetadata() != null + && reservedState.errorMetadata().errorKind() == ReservedStateErrorMetadata.ErrorKind.PARSING) { clusterService.removeListener(this); metadataVersion.set(event.state().metadata().version()); savedClusterState.countDown(); - errorMetadataConsumer.accept(reservedState.errorMetadata()); + assertEquals(ReservedStateErrorMetadata.ErrorKind.PARSING, reservedState.errorMetadata().errorKind()); + assertThat(reservedState.errorMetadata().errors(), allOf(notNullValue(), hasSize(1))); + assertThat( + reservedState.errorMetadata().errors().get(0), + containsString("failed to parse role-mapping [everyone_kibana_bad]. missing field [rules]") + ); } } }); @@ -373,13 +325,22 @@ public void clusterChanged(ClusterChangedEvent event) { return new Tuple<>(savedClusterState, metadataVersion); } + private void assertRoleMappingsNotSaved(CountDownLatch savedClusterState, AtomicLong metadataVersion) throws Exception { + boolean awaitSuccessful = savedClusterState.await(20, TimeUnit.SECONDS); + assertTrue(awaitSuccessful); + + // This should succeed, nothing was reserved + client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_kibana_bad")).get(); + client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_fleet_ok")).get(); + } + public void testErrorSaved() throws Exception { ensureGreen(); // save an empty file to clear any prior state, this ensures we don't get a stale file left over by another test var savedClusterState = setupClusterStateListenerForCleanup(internalCluster().getMasterName()); - writeJSONFile(internalCluster().getMasterName(), emptyJSON, logger, versionCounter); + writeJSONFile(internalCluster().getMasterName(), emptyJSON); boolean awaitSuccessful = savedClusterState.v1().await(20, 
TimeUnit.SECONDS); assertTrue(awaitSuccessful); @@ -392,94 +353,76 @@ public void testErrorSaved() throws Exception { ); // save a bad file - savedClusterState = setupClusterStateListenerForError( - internalCluster().getCurrentMasterNodeInstance(ClusterService.class), - errorMetadata -> { - assertEquals(ReservedStateErrorMetadata.ErrorKind.PARSING, errorMetadata.errorKind()); - assertThat(errorMetadata.errors(), allOf(notNullValue(), hasSize(1))); - assertThat( - errorMetadata.errors().get(0), - containsString("failed to parse role-mapping [everyone_kibana_bad]. missing field [rules]") - ); - } - ); + savedClusterState = setupClusterStateListenerForError(internalCluster().getMasterName()); - writeJSONFile(internalCluster().getMasterName(), testErrorJSON, logger, versionCounter); - awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); - assertTrue(awaitSuccessful); + writeJSONFile(internalCluster().getMasterName(), testErrorJSON); + assertRoleMappingsNotSaved(savedClusterState.v1(), savedClusterState.v2()); + } - // no roles are resolved because both role mapping stores are empty - for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { - PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>(); - userRoleMapper.resolveRoles( - new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), - resolveRolesFuture - ); - assertThat(resolveRolesFuture.get(), empty()); - } + private Tuple setupClusterStateListenerForSecurityWriteError(String node) { + ClusterService clusterService = internalCluster().clusterService(node); + CountDownLatch savedClusterState = new CountDownLatch(1); + AtomicLong metadataVersion = new AtomicLong(-1); + clusterService.addListener(new ClusterStateListener() { + @Override + public void clusterChanged(ClusterChangedEvent event) { + ReservedStateMetadata reservedState = event.state().metadata().reservedStateMetadata().get(FileSettingsService.NAMESPACE); 
+ if (reservedState != null + && reservedState.errorMetadata() != null + && reservedState.errorMetadata().errorKind() == ReservedStateErrorMetadata.ErrorKind.VALIDATION) { + clusterService.removeListener(this); + metadataVersion.set(event.state().metadata().version()); + savedClusterState.countDown(); + assertEquals(ReservedStateErrorMetadata.ErrorKind.VALIDATION, reservedState.errorMetadata().errorKind()); + assertThat(reservedState.errorMetadata().errors(), allOf(notNullValue(), hasSize(1))); + assertThat(reservedState.errorMetadata().errors().get(0), containsString("closed")); + } + } + }); + + return new Tuple<>(savedClusterState, metadataVersion); } - public void testRoleMappingApplyWithSecurityIndexClosed() throws Exception { + public void testRoleMappingFailsToWriteToStore() throws Exception { ensureGreen(); - // expect the role mappings to apply even if the .security index is closed - var savedClusterState = setupClusterStateListener(internalCluster().getMasterName(), "everyone_kibana"); + var savedClusterState = setupClusterStateListenerForSecurityWriteError(internalCluster().getMasterName()); - try { - var closeIndexResponse = indicesAdmin().close(new CloseIndexRequest(INTERNAL_SECURITY_MAIN_INDEX_7)).get(); - assertTrue(closeIndexResponse.isAcknowledged()); - - writeJSONFile(internalCluster().getMasterName(), testJSON, logger, versionCounter); - boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); - assertTrue(awaitSuccessful); - - // no native role mappings exist - var request = new GetRoleMappingsRequest(); - request.setNames("everyone_kibana", "everyone_fleet"); - var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); - assertFalse(response.hasMappings()); - - // cluster state settings are also applied - var clusterStateResponse = clusterAdmin().state(new ClusterStateRequest().waitForMetadataVersion(savedClusterState.v2().get())) - .get(); - assertThat( - 
clusterStateResponse.getState().metadata().persistentSettings().get(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey()), - equalTo("50mb") - ); - - ReservedStateMetadata reservedState = clusterStateResponse.getState() - .metadata() - .reservedStateMetadata() - .get(FileSettingsService.NAMESPACE); - - ReservedStateHandlerMetadata handlerMetadata = reservedState.handlers().get(ReservedRoleMappingAction.NAME); - assertThat(handlerMetadata.keys(), containsInAnyOrder("everyone_kibana", "everyone_fleet")); - - // and roles are resolved based on the cluster-state role mappings - for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { - PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>(); - userRoleMapper.resolveRoles( - new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), - resolveRolesFuture - ); - assertThat(resolveRolesFuture.get(), containsInAnyOrder("kibana_user", "fleet_user")); - } - } finally { - savedClusterState = setupClusterStateListenerForCleanup(internalCluster().getMasterName()); - writeJSONFile(internalCluster().getMasterName(), emptyJSON, logger, versionCounter); - boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); - assertTrue(awaitSuccessful); - - var openIndexResponse = indicesAdmin().open(new OpenIndexRequest(INTERNAL_SECURITY_MAIN_INDEX_7)).get(); - assertTrue(openIndexResponse.isAcknowledged()); - } + final CloseIndexResponse closeIndexResponse = indicesAdmin().close(new CloseIndexRequest(INTERNAL_SECURITY_MAIN_INDEX_7)).get(); + assertTrue(closeIndexResponse.isAcknowledged()); + + writeJSONFile(internalCluster().getMasterName(), testJSON); + boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); + assertTrue(awaitSuccessful); + + var request = new GetRoleMappingsRequest(); + request.setNames("everyone_kibana", "everyone_fleet"); + + var response = client().execute(GetRoleMappingsAction.INSTANCE, 
request).get(); + assertFalse(response.hasMappings()); + + final ClusterStateResponse clusterStateResponse = clusterAdmin().state( + new ClusterStateRequest().waitForMetadataVersion(savedClusterState.v2().get()) + ).get(); + + assertNull( + clusterStateResponse.getState().metadata().persistentSettings().get(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey()) + ); + + ReservedStateMetadata reservedState = clusterStateResponse.getState() + .metadata() + .reservedStateMetadata() + .get(FileSettingsService.NAMESPACE); + + ReservedStateHandlerMetadata handlerMetadata = reservedState.handlers().get(ReservedRoleMappingAction.NAME); + assertTrue(handlerMetadata == null || handlerMetadata.keys().isEmpty()); } private PutRoleMappingRequest sampleRestRequest(String name) throws Exception { var json = """ { - "enabled": true, - "roles": [ "kibana_user_native" ], + "enabled": false, + "roles": [ "kibana_user" ], "rules": { "field": { "username": "*" } }, "metadata": { "uuid" : "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7" @@ -490,7 +433,8 @@ private PutRoleMappingRequest sampleRestRequest(String name) throws Exception { var bis = new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)); var parser = JSON.xContent().createParser(XContentParserConfiguration.EMPTY, bis) ) { - return new PutRoleMappingRequestBuilder(null).source(name, parser).request(); + ExpressionRoleMapping mapping = ExpressionRoleMapping.parse(name, parser); + return PutRoleMappingRequest.fromMapping(mapping); } } } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsStartupIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsStartupIT.java new file mode 100644 index 0000000000000..48e97b7afb897 --- /dev/null +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsStartupIT.java @@ -0,0 +1,148 @@ +/* + * Copyright 
Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security; + +import org.elasticsearch.analysis.common.CommonAnalysisPlugin; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.metadata.ReservedStateErrorMetadata; +import org.elasticsearch.cluster.metadata.ReservedStateMetadata; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.core.Strings; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.index.mapper.extras.MapperExtrasPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.reindex.ReindexPlugin; +import org.elasticsearch.reservedstate.service.FileSettingsService; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; +import org.elasticsearch.test.SecurityIntegTestCase; +import org.elasticsearch.test.junit.annotations.TestLogging; +import org.elasticsearch.transport.netty4.Netty4Plugin; +import org.elasticsearch.xpack.wildcard.Wildcard; + +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardCopyOption; +import java.util.Arrays; +import java.util.Collection; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicLong; + +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.notNullValue; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, autoManageMasterNodes = false) +public class FileSettingsRoleMappingsStartupIT extends 
SecurityIntegTestCase { + + private static AtomicLong versionCounter = new AtomicLong(1); + private static String testJSONForFailedCase = """ + { + "metadata": { + "version": "%s", + "compatibility": "8.4.0" + }, + "state": { + "role_mappings": { + "everyone_kibana_2": { + "enabled": true, + "roles": [ "kibana_user" ], + "rules": { "field": { "username": "*" } }, + "metadata": { + "uuid" : "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", + "_foo": "something" + } + } + } + } + }"""; + + @Override + protected void doAssertXPackIsInstalled() {} + + @Override + protected Path nodeConfigPath(int nodeOrdinal) { + return null; + } + + private void writeJSONFile(String node, String json) throws Exception { + long version = versionCounter.incrementAndGet(); + + FileSettingsService fileSettingsService = internalCluster().getInstance(FileSettingsService.class, node); + + Files.deleteIfExists(fileSettingsService.watchedFile()); + + Files.createDirectories(fileSettingsService.watchedFileDir()); + Path tempFilePath = createTempFile(); + + logger.info("--> writing JSON config to node {} with path {}", node, tempFilePath); + logger.info(Strings.format(json, version)); + Files.write(tempFilePath, Strings.format(json, version).getBytes(StandardCharsets.UTF_8)); + Files.move(tempFilePath, fileSettingsService.watchedFile(), StandardCopyOption.ATOMIC_MOVE); + } + + private Tuple setupClusterStateListenerForError(String node) { + ClusterService clusterService = internalCluster().clusterService(node); + CountDownLatch savedClusterState = new CountDownLatch(1); + AtomicLong metadataVersion = new AtomicLong(-1); + clusterService.addListener(new ClusterStateListener() { + @Override + public void clusterChanged(ClusterChangedEvent event) { + ReservedStateMetadata reservedState = event.state().metadata().reservedStateMetadata().get(FileSettingsService.NAMESPACE); + if (reservedState != null && reservedState.errorMetadata() != null) { + assertEquals(ReservedStateErrorMetadata.ErrorKind.VALIDATION, 
reservedState.errorMetadata().errorKind()); + assertThat(reservedState.errorMetadata().errors(), allOf(notNullValue(), hasSize(1))); + assertThat(reservedState.errorMetadata().errors().get(0), containsString("Fake exception")); + clusterService.removeListener(this); + metadataVersion.set(event.state().metadata().version()); + savedClusterState.countDown(); + } else if (reservedState != null) { + logger.debug(() -> "Got reserved state update without error metadata: " + reservedState); + } else { + logger.debug(() -> "Got cluster state update: " + event.source()); + } + } + }); + + return new Tuple<>(savedClusterState, metadataVersion); + } + + @TestLogging( + value = "org.elasticsearch.common.file:DEBUG,org.elasticsearch.xpack.security:DEBUG,org.elasticsearch.cluster.metadata:DEBUG", + reason = "https://github.com/elastic/elasticsearch/issues/98391" + ) + public void testFailsOnStartMasterNodeWithError() throws Exception { + internalCluster().setBootstrapMasterNodeIndex(0); + + internalCluster().startMasterOnlyNode(); + + logger.info("--> write some role mappings, no other file settings"); + writeJSONFile(internalCluster().getMasterName(), testJSONForFailedCase); + var savedClusterState = setupClusterStateListenerForError(internalCluster().getMasterName()); + + boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); + assertTrue(awaitSuccessful); + } + + public Collection> nodePlugins() { + return Arrays.asList( + UnstableLocalStateSecurity.class, + Netty4Plugin.class, + ReindexPlugin.class, + CommonAnalysisPlugin.class, + InternalSettingsPlugin.class, + MapperExtrasPlugin.class, + Wildcard.class + ); + } + +} diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java index 351cf05b2096d..58d6657b99e32 100644 --- 
a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java @@ -85,7 +85,7 @@ import org.elasticsearch.xpack.core.security.authc.RealmDomain; import org.elasticsearch.xpack.core.security.authc.file.FileRealmSettings; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper; import org.elasticsearch.xpack.core.security.authz.RoleDescriptorsIntersection; import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; @@ -2551,11 +2551,11 @@ public void testUpdateApiKeysNoopScenarios() throws Exception { final List newRoleDescriptors = List.of( randomValueOtherThanMany( rd -> RoleDescriptorRequestValidator.validate(rd) != null || initialRequest.getRoleDescriptors().contains(rd), - () -> RoleDescriptorTests.randomRoleDescriptor(false) + () -> RoleDescriptorTestHelper.builder().build() ), randomValueOtherThanMany( rd -> RoleDescriptorRequestValidator.validate(rd) != null || initialRequest.getRoleDescriptors().contains(rd), - () -> RoleDescriptorTests.randomRoleDescriptor(false) + () -> RoleDescriptorTestHelper.builder().build() ) ); response = updateSingleApiKeyMaybeUsingBulkAction( @@ -2769,7 +2769,7 @@ private List randomRoleDescriptors() { new RoleDescriptor(randomAlphaOfLength(10), new String[] { "all" }, null, null), randomValueOtherThanMany( rd -> RoleDescriptorRequestValidator.validate(rd) != null, - () -> RoleDescriptorTests.randomRoleDescriptor(false, true, false, true) + () -> RoleDescriptorTestHelper.builder().allowRemoteIndices(true).allowRemoteClusters(true).build() ) ); case 2 -> null; @@ -2887,6 +2887,7 @@ private void 
expectRoleDescriptorsForApiKey( final var descriptor = (Map) rawRoleDescriptor.get(expectedRoleDescriptor.getName()); final var roleDescriptor = RoleDescriptor.parserBuilder() .allowRestriction(true) + .allowDescription(true) .build() .parse( expectedRoleDescriptor.getName(), diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java index 9d56528a060c3..ce4c8719f0642 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java @@ -223,6 +223,7 @@ private void testAddAndGetRole(String roleName) { new BytesArray("{\"match_all\": {}}"), randomBoolean() ) + .description(randomAlphaOfLengthBetween(5, 20)) .metadata(metadata) .get(); logger.error("--> waiting for .security index"); @@ -245,6 +246,7 @@ private void testAddAndGetRole(String roleName) { new BytesArray("{\"match_all\": {}}"), randomBoolean() ) + .description(randomAlphaOfLengthBetween(5, 20)) .get(); preparePutRole("test_role3").cluster("all", "none") .runAs("root", "nobody") @@ -256,6 +258,7 @@ private void testAddAndGetRole(String roleName) { new BytesArray("{\"match_all\": {}}"), randomBoolean() ) + .description(randomAlphaOfLengthBetween(5, 20)) .get(); logger.info("--> retrieving all roles"); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 0ff4f1160af56..ef08f855a46cc 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -1103,7 +1103,8 @@ Collection createComponents( new SecurityUsageServices(realms, allRolesStore, nativeRoleMappingStore, ipFilter.get(), profileService, apiKeyService) ); - reservedRoleMappingAction.set(new ReservedRoleMappingAction()); + reservedRoleMappingAction.set(new ReservedRoleMappingAction(nativeRoleMappingStore)); + systemIndices.getMainIndexManager().onStateRecovered(state -> reservedRoleMappingAction.get().securityIndexRecovered()); cacheInvalidatorRegistry.validate(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/ReservedRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/ReservedRoleMappingAction.java index 73d1a1abcdb50..852887767578f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/ReservedRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/ReservedRoleMappingAction.java @@ -7,18 +7,24 @@ package org.elasticsearch.xpack.security.action.rolemapping; -import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.GroupedActionListener; +import org.elasticsearch.common.util.concurrent.ListenableFuture; +import org.elasticsearch.reservedstate.NonStateTransformResult; import org.elasticsearch.reservedstate.ReservedClusterStateHandler; import org.elasticsearch.reservedstate.TransformState; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; -import 
org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequestBuilder; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; -import org.elasticsearch.xpack.core.security.authz.RoleMappingMetadata; +import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; import java.io.IOException; import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -32,59 +38,123 @@ * It is used by the ReservedClusterStateService to add/update or remove role mappings. Typical usage * for this action is in the context of file based settings. */ -public class ReservedRoleMappingAction implements ReservedClusterStateHandler> { +public class ReservedRoleMappingAction implements ReservedClusterStateHandler> { public static final String NAME = "role_mappings"; + private final NativeRoleMappingStore roleMappingStore; + private final ListenableFuture securityIndexRecoveryListener = new ListenableFuture<>(); + + /** + * Creates a ReservedRoleMappingAction + * + * @param roleMappingStore requires {@link NativeRoleMappingStore} for storing/deleting the mappings + */ + public ReservedRoleMappingAction(NativeRoleMappingStore roleMappingStore) { + this.roleMappingStore = roleMappingStore; + } + @Override public String name() { return NAME; } + private static Collection prepare(List roleMappings) { + List requests = roleMappings.stream().map(rm -> PutRoleMappingRequest.fromMapping(rm)).toList(); + + var exceptions = new ArrayList(); + for (var request : requests) { + // File based defined role mappings are allowed to use MetadataUtils.RESERVED_PREFIX + var exception = request.validate(false); + if (exception != null) { + exceptions.add(exception); + } + } + + if (exceptions.isEmpty() == false) { + var illegalArgumentException = new IllegalArgumentException("error on validating put role mapping 
requests"); + exceptions.forEach(illegalArgumentException::addSuppressed); + throw illegalArgumentException; + } + + return requests; + } + @Override public TransformState transform(Object source, TransformState prevState) throws Exception { + // We execute the prepare() call to catch any errors in the transform phase. + // Since we store the role mappings outside the cluster state, we do the actual save with a + // non cluster state transform call. @SuppressWarnings("unchecked") - Set roleMappings = validate((List) source); - RoleMappingMetadata newRoleMappingMetadata = new RoleMappingMetadata(roleMappings); - if (newRoleMappingMetadata.equals(RoleMappingMetadata.getFromClusterState(prevState.state()))) { - return prevState; - } else { - ClusterState newState = newRoleMappingMetadata.updateClusterState(prevState.state()); - Set entities = newRoleMappingMetadata.getRoleMappings() - .stream() - .map(ExpressionRoleMapping::getName) - .collect(Collectors.toSet()); - return new TransformState(newState, entities); + var requests = prepare((List) source); + return new TransformState( + prevState.state(), + prevState.keys(), + l -> securityIndexRecoveryListener.addListener( + ActionListener.wrap(ignored -> nonStateTransform(requests, prevState, l), l::onFailure) + ) + ); + } + + // Exposed for testing purposes + protected void nonStateTransform( + Collection requests, + TransformState prevState, + ActionListener listener + ) { + Set entities = requests.stream().map(r -> r.getName()).collect(Collectors.toSet()); + Set toDelete = new HashSet<>(prevState.keys()); + toDelete.removeAll(entities); + + final int tasksCount = requests.size() + toDelete.size(); + + // Nothing to do, don't start a group listener with 0 actions + if (tasksCount == 0) { + listener.onResponse(new NonStateTransformResult(ReservedRoleMappingAction.NAME, Set.of())); + return; + } + + GroupedActionListener taskListener = new GroupedActionListener<>(tasksCount, new ActionListener<>() { + @Override + public 
void onResponse(Collection booleans) { + listener.onResponse(new NonStateTransformResult(ReservedRoleMappingAction.NAME, Collections.unmodifiableSet(entities))); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + + for (var request : requests) { + roleMappingStore.putRoleMapping(request, taskListener); + } + + for (var mappingToDelete : toDelete) { + var deleteRequest = new DeleteRoleMappingRequest(); + deleteRequest.setName(mappingToDelete); + roleMappingStore.deleteRoleMapping(deleteRequest, taskListener); } } @Override - public List fromXContent(XContentParser parser) throws IOException { - List result = new ArrayList<>(); + public List fromXContent(XContentParser parser) throws IOException { + List result = new ArrayList<>(); + Map source = parser.map(); + for (String name : source.keySet()) { @SuppressWarnings("unchecked") Map content = (Map) source.get(name); try (XContentParser mappingParser = mapToXContentParser(XContentParserConfiguration.EMPTY, content)) { - result.add(new PutRoleMappingRequestBuilder(null).source(name, mappingParser).request()); + ExpressionRoleMapping mapping = ExpressionRoleMapping.parse(name, mappingParser); + result.add(mapping); } } + return result; } - private Set validate(List roleMappings) { - var exceptions = new ArrayList(); - for (var roleMapping : roleMappings) { - // File based defined role mappings are allowed to use MetadataUtils.RESERVED_PREFIX - var exception = roleMapping.validate(false); - if (exception != null) { - exceptions.add(exception); - } - } - if (exceptions.isEmpty() == false) { - var illegalArgumentException = new IllegalArgumentException("error on validating put role mapping requests"); - exceptions.forEach(illegalArgumentException::addSuppressed); - throw illegalArgumentException; - } - return roleMappings.stream().map(PutRoleMappingRequest::getMapping).collect(Collectors.toUnmodifiableSet()); + public void securityIndexRecovered() { + 
securityIndexRecoveryListener.onResponse(null); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java index b4e8d5d6db83f..811d357b89f89 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java @@ -8,9 +8,9 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.ReservedStateAwareHandledTransportAction; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingAction; @@ -18,7 +18,12 @@ import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingResponse; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; -public class TransportDeleteRoleMappingAction extends HandledTransportAction { +import java.util.Optional; +import java.util.Set; + +public class TransportDeleteRoleMappingAction extends ReservedStateAwareHandledTransportAction< + DeleteRoleMappingRequest, + DeleteRoleMappingResponse> { private final NativeRoleMappingStore roleMappingStore; @@ -26,20 +31,25 @@ public class TransportDeleteRoleMappingAction extends HandledTransportAction listener) { + protected void doExecuteProtected(Task task, DeleteRoleMappingRequest request, ActionListener listener) { 
roleMappingStore.deleteRoleMapping(request, listener.safeMap(DeleteRoleMappingResponse::new)); } + + @Override + public Optional reservedStateHandlerName() { + return Optional.of(ReservedRoleMappingAction.NAME); + } + + @Override + public Set modifiedKeys(DeleteRoleMappingRequest request) { + return Set.of(request.getName()); + } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java index 44c72bc13a54b..5e32e4f903f81 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java @@ -8,9 +8,9 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.ReservedStateAwareHandledTransportAction; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingAction; @@ -18,7 +18,10 @@ import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; -public class TransportPutRoleMappingAction extends HandledTransportAction { +import java.util.Optional; +import java.util.Set; + +public class TransportPutRoleMappingAction extends ReservedStateAwareHandledTransportAction { private final NativeRoleMappingStore roleMappingStore; @@ -26,17 +29,32 
@@ public class TransportPutRoleMappingAction extends HandledTransportAction listener) { + protected void doExecuteProtected( + Task task, + final PutRoleMappingRequest request, + final ActionListener listener + ) { roleMappingStore.putRoleMapping( request, ActionListener.wrap(created -> listener.onResponse(new PutRoleMappingResponse(created)), listener::onFailure) ); } + + @Override + public Optional reservedStateHandlerName() { + return Optional.of(ReservedRoleMappingAction.NAME); + } + + @Override + public Set modifiedKeys(PutRoleMappingRequest request) { + return Set.of(request.getName()); + } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java index 6d76fac71e900..55a89e184f84f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java @@ -360,8 +360,9 @@ && hasRemoteIndices(request.getRoleDescriptors())) { return; } + final Set userRolesWithoutDescription = removeUserRoleDescriptorDescriptions(userRoleDescriptors); final Set filteredUserRoleDescriptors = maybeRemoveRemotePrivileges( - userRoleDescriptors, + userRolesWithoutDescription, transportVersion, request.getId() ); @@ -370,6 +371,28 @@ && hasRemoteIndices(request.getRoleDescriptors())) { } } + private Set removeUserRoleDescriptorDescriptions(Set userRoleDescriptors) { + return userRoleDescriptors.stream().map(roleDescriptor -> { + if (roleDescriptor.hasDescription()) { + return new RoleDescriptor( + roleDescriptor.getName(), + roleDescriptor.getClusterPrivileges(), + roleDescriptor.getIndicesPrivileges(), + roleDescriptor.getApplicationPrivileges(), + roleDescriptor.getConditionalClusterPrivileges(), + roleDescriptor.getRunAs(), + roleDescriptor.getMetadata(), + roleDescriptor.getTransientMetadata(), + 
roleDescriptor.getRemoteIndicesPrivileges(), + roleDescriptor.getRemoteClusterPermissions(), + roleDescriptor.getRestriction(), + null + ); + } + return roleDescriptor; + }).collect(Collectors.toSet()); + } + private TransportVersion getMinTransportVersion() { return clusterService.state().getMinTransportVersion(); } @@ -534,8 +557,9 @@ public void updateApiKeys( } final String[] apiKeyIds = request.getIds().toArray(String[]::new); + final Set userRolesWithoutDescription = removeUserRoleDescriptorDescriptions(userRoleDescriptors); final Set filteredUserRoleDescriptors = maybeRemoveRemotePrivileges( - userRoleDescriptors, + userRolesWithoutDescription, transportVersion, apiKeyIds ); @@ -673,7 +697,8 @@ static Set maybeRemoveRemotePrivileges( roleDescriptor.hasRemoteClusterPermissions() && transportVersion.before(ROLE_REMOTE_CLUSTER_PRIVS) ? null : roleDescriptor.getRemoteClusterPermissions(), - roleDescriptor.getRestriction() + roleDescriptor.getRestriction(), + roleDescriptor.getDescription() ); } return roleDescriptor; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java index 71a78c1627946..7618135c8662f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java @@ -67,7 +67,10 @@ public class FileRolesStore implements BiConsumer, ActionListener, ActionListener< private static final Logger logger = LogManager.getLogger(NativeRolesStore.class); - private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder().allow2xFormat(true).build(); + private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder() + .allow2xFormat(true) + .allowDescription(true) + .build(); private 
final Settings settings; private final Client client; @@ -272,9 +276,18 @@ public void putRole(final PutRoleRequest request, final RoleDescriptor role, fin "all nodes must have version [" + ROLE_REMOTE_CLUSTER_PRIVS + "] or higher to support remote cluster privileges" ) ); - } else { - innerPutRole(request, role, listener); - } + } else if (role.hasDescription() + && clusterService.state().getMinTransportVersion().before(TransportVersions.SECURITY_ROLE_DESCRIPTION)) { + listener.onFailure( + new IllegalStateException( + "all nodes must have version [" + + TransportVersions.SECURITY_ROLE_DESCRIPTION.toReleaseVersion() + + "] or higher to support specifying role description" + ) + ); + } else { + innerPutRole(request, role, listener); + } } // pkg-private for testing @@ -535,7 +548,8 @@ static RoleDescriptor transformRole(String id, BytesReference sourceBytes, Logge transientMap, roleDescriptor.getRemoteIndicesPrivileges(), roleDescriptor.getRemoteClusterPermissions(), - roleDescriptor.getRestriction() + roleDescriptor.getRestriction(), + roleDescriptor.getDescription() ); } else { return roleDescriptor; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java index 55562c8ee0138..e7e24037543fa 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java @@ -8,8 +8,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.RestApiVersion; import 
org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; @@ -19,7 +17,6 @@ import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestBuilderListener; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequestBuilder; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; @@ -60,18 +57,12 @@ public String getName() { @Override public RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException { - String name = request.param("name"); - String refresh = request.param("refresh"); - PutRoleMappingRequestBuilder requestBuilder; - try ( - XContentParser parser = XContentHelper.createParserNotCompressed( - LoggingDeprecationHandler.XCONTENT_PARSER_CONFIG, - request.requiredContent(), - request.getXContentType() - ) - ) { - requestBuilder = new PutRoleMappingRequestBuilder(client).source(name, parser).setRefreshPolicy(refresh); - } + final String name = request.param("name"); + PutRoleMappingRequestBuilder requestBuilder = new PutRoleMappingRequestBuilder(client).source( + name, + request.requiredContent(), + request.getXContentType() + ).setRefreshPolicy(request.param("refresh")); return channel -> requestBuilder.execute(new RestBuilderListener<>(channel) { @Override public RestResponse buildResponse(PutRoleMappingResponse response, XContentBuilder builder) throws Exception { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java index ed198834d24f1..9e20cb05a3cdc 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java @@ -14,6 +14,7 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.VersionId; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexSettings; @@ -23,9 +24,12 @@ import java.io.IOException; import java.io.UncheckedIOException; +import java.util.Arrays; import java.util.Collection; +import java.util.Comparator; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Function; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN; @@ -38,7 +42,6 @@ public class SecuritySystemIndices { public static final int INTERNAL_MAIN_INDEX_FORMAT = 6; - public static final int INTERNAL_MAIN_INDEX_MAPPINGS_FORMAT = 1; private static final int INTERNAL_TOKENS_INDEX_FORMAT = 7; private static final int INTERNAL_TOKENS_INDEX_MAPPINGS_FORMAT = 1; private static final int INTERNAL_PROFILE_INDEX_FORMAT = 8; @@ -119,18 +122,22 @@ private void checkInitialized() { } private SystemIndexDescriptor getSecurityMainIndexDescriptor() { - return SystemIndexDescriptor.builder() - // This can't just be `.security-*` because that would overlap with the tokens index pattern - .setIndexPattern(".security-[0-9]+*") - .setPrimaryIndex(MAIN_INDEX_CONCRETE_NAME) - .setDescription("Contains Security configuration") - .setMappings(getMainIndexMappings()) - .setSettings(getMainIndexSettings()) - .setAliasName(SECURITY_MAIN_ALIAS) - .setIndexFormat(INTERNAL_MAIN_INDEX_FORMAT) - .setVersionMetaKey(SECURITY_VERSION_STRING) - .setOrigin(SECURITY_ORIGIN) - .setThreadPools(ExecutorNames.CRITICAL_SYSTEM_INDEX_THREAD_POOLS) + final Function 
securityIndexDescriptorBuilder = + mappingVersion -> SystemIndexDescriptor.builder() + // This can't just be `.security-*` because that would overlap with the tokens index pattern + .setIndexPattern(".security-[0-9]+*") + .setPrimaryIndex(MAIN_INDEX_CONCRETE_NAME) + .setDescription("Contains Security configuration") + .setMappings(getMainIndexMappings(mappingVersion)) + .setSettings(getMainIndexSettings()) + .setAliasName(SECURITY_MAIN_ALIAS) + .setIndexFormat(INTERNAL_MAIN_INDEX_FORMAT) + .setVersionMetaKey(SECURITY_VERSION_STRING) + .setOrigin(SECURITY_ORIGIN) + .setThreadPools(ExecutorNames.CRITICAL_SYSTEM_INDEX_THREAD_POOLS); + + return securityIndexDescriptorBuilder.apply(SecurityMainIndexMappingVersion.latest()) + .setPriorSystemIndexDescriptors(List.of(securityIndexDescriptorBuilder.apply(SecurityMainIndexMappingVersion.INITIAL).build())) .build(); } @@ -149,14 +156,14 @@ private static Settings getMainIndexSettings() { .build(); } - private XContentBuilder getMainIndexMappings() { + private XContentBuilder getMainIndexMappings(SecurityMainIndexMappingVersion mappingVersion) { try { final XContentBuilder builder = jsonBuilder(); builder.startObject(); { builder.startObject("_meta"); builder.field(SECURITY_VERSION_STRING, BWC_MAPPINGS_VERSION); // Only needed for BWC with pre-8.15.0 nodes - builder.field(SystemIndexDescriptor.VERSION_META_KEY, INTERNAL_MAIN_INDEX_MAPPINGS_FORMAT); + builder.field(SystemIndexDescriptor.VERSION_META_KEY, mappingVersion.id); builder.endObject(); builder.field("dynamic", "strict"); @@ -304,22 +311,24 @@ private XContentBuilder getMainIndexMappings() { } builder.endObject(); - builder.startObject("remote_cluster"); - { - builder.field("type", "object"); - builder.startObject("properties"); + if (mappingVersion.onOrAfter(SecurityMainIndexMappingVersion.ADD_REMOTE_CLUSTER_AND_DESCRIPTION_FIELDS)) { + builder.startObject("remote_cluster"); { - builder.startObject("clusters"); - builder.field("type", "keyword"); - builder.endObject(); 
+ builder.field("type", "object"); + builder.startObject("properties"); + { + builder.startObject("clusters"); + builder.field("type", "keyword"); + builder.endObject(); - builder.startObject("privileges"); - builder.field("type", "keyword"); + builder.startObject("privileges"); + builder.field("type", "keyword"); + builder.endObject(); + } builder.endObject(); } builder.endObject(); } - builder.endObject(); builder.startObject("applications"); { @@ -402,6 +411,12 @@ private XContentBuilder getMainIndexMappings() { builder.field("type", "keyword"); builder.endObject(); + if (mappingVersion.onOrAfter(SecurityMainIndexMappingVersion.ADD_REMOTE_CLUSTER_AND_DESCRIPTION_FIELDS)) { + builder.startObject("description"); + builder.field("type", "text"); + builder.endObject(); + } + builder.startObject("run_as"); builder.field("type", "keyword"); builder.endObject(); @@ -1010,4 +1025,46 @@ private static void defineRealmDomain(XContentBuilder builder, String fieldName) builder.endObject(); } + /** + * Every change to the mapping of .security index must be versioned. When adding a new mapping version: + *
    + *
  • pick the next largest version ID - this will automatically become the new {@link #latest()} version
  • + *
  • add your mapping change in {@link #getMainIndexMappings(SecurityMainIndexMappingVersion)} conditionally to a new version
  • + *
  • make sure to set old latest version to "prior system index descriptors" in {@link #getSecurityMainIndexDescriptor()}
  • + *
+ */ + public enum SecurityMainIndexMappingVersion implements VersionId { + + /** + * Initial .security index mapping version. + */ + INITIAL(1), + + /** + * The mapping was changed to add new text description and remote_cluster fields. + */ + ADD_REMOTE_CLUSTER_AND_DESCRIPTION_FIELDS(2), + + ; + + private static final SecurityMainIndexMappingVersion LATEST = Arrays.stream(values()) + .max(Comparator.comparingInt(v -> v.id)) + .orElseThrow(); + + private final int id; + + SecurityMainIndexMappingVersion(int id) { + assert id > 0; + this.id = id; + } + + @Override + public int id() { + return id; + } + + public static SecurityMainIndexMappingVersion latest() { + return LATEST; + } + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/TestSecurityClient.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/TestSecurityClient.java index e8eb50e3a6529..a7014ece93ae5 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/TestSecurityClient.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/TestSecurityClient.java @@ -212,7 +212,7 @@ private Map getRoleDescriptors(String roleParameter) thr XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser); final String roleName = parser.currentName(); XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - final RoleDescriptor role = RoleDescriptor.parserBuilder().build().parse(roleName, parser); + final RoleDescriptor role = RoleDescriptor.parserBuilder().allowDescription(true).build().parse(roleName, parser); roles.put(roleName, role); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalReservedUnstableSecurityStateHandlerProvider.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalReservedUnstableSecurityStateHandlerProvider.java new file mode 100644 index 0000000000000..b4a07093e49c3 --- 
/dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalReservedUnstableSecurityStateHandlerProvider.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security; + +import org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider; + +/** + * Mock Security Provider implementation for the {@link ReservedClusterStateHandlerProvider} service interface. This is used + * for {@link org.elasticsearch.test.ESIntegTestCase} because the Security Plugin is really LocalStateSecurity in those tests. + *

+ * Unlike {@link LocalReservedSecurityStateHandlerProvider} this implementation is mocked to implement the + * {@link UnstableLocalStateSecurity}. Separate implementation is needed, because the SPI creation code matches the constructor + * signature when instantiating. E.g. we need to match {@link UnstableLocalStateSecurity} instead of {@link LocalStateSecurity} + */ +public class LocalReservedUnstableSecurityStateHandlerProvider extends LocalReservedSecurityStateHandlerProvider { + public LocalReservedUnstableSecurityStateHandlerProvider() { + throw new IllegalStateException("Provider must be constructed using PluginsService"); + } + + public LocalReservedUnstableSecurityStateHandlerProvider(UnstableLocalStateSecurity plugin) { + super(plugin); + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/UnstableLocalStateSecurity.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/UnstableLocalStateSecurity.java new file mode 100644 index 0000000000000..5621bdced15b3 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/UnstableLocalStateSecurity.java @@ -0,0 +1,97 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.reservedstate.NonStateTransformResult; +import org.elasticsearch.reservedstate.ReservedClusterStateHandler; +import org.elasticsearch.reservedstate.TransformState; +import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; +import org.elasticsearch.xpack.core.ssl.SSLService; +import org.elasticsearch.xpack.security.action.rolemapping.ReservedRoleMappingAction; + +import java.nio.file.Path; +import java.util.Collection; +import java.util.List; +import java.util.Optional; + +/** + * A test class that allows us to Inject new type of Reserved Handler that can + * simulate errors in saving role mappings. + *

+ * We can't use our regular path to simply make an extension of LocalStateSecurity + * in an integration test class, because the reserved handlers are injected through + * SPI. (see {@link LocalReservedUnstableSecurityStateHandlerProvider}) + */ +public final class UnstableLocalStateSecurity extends LocalStateSecurity { + + public UnstableLocalStateSecurity(Settings settings, Path configPath) throws Exception { + super(settings, configPath); + // We reuse most of the initialization of LocalStateSecurity, we then just overwrite + // the security plugin with an extra method to give us a fake RoleMappingAction. + Optional security = plugins.stream().filter(p -> p instanceof Security).findFirst(); + if (security.isPresent()) { + plugins.remove(security.get()); + } + + UnstableLocalStateSecurity thisVar = this; + var action = new ReservedUnstableRoleMappingAction(); + + plugins.add(new Security(settings, super.securityExtensions()) { + @Override + protected SSLService getSslService() { + return thisVar.getSslService(); + } + + @Override + protected XPackLicenseState getLicenseState() { + return thisVar.getLicenseState(); + } + + @Override + List> reservedClusterStateHandlers() { + // pretend the security index is initialized after 2 seconds + var timer = new java.util.Timer(); + timer.schedule(new java.util.TimerTask() { + @Override + public void run() { + action.securityIndexRecovered(); + timer.cancel(); + } + }, 2_000); + return List.of(action); + } + }); + } + + public static class ReservedUnstableRoleMappingAction extends ReservedRoleMappingAction { + /** + * Creates a fake ReservedRoleMappingAction that doesn't actually use the role mapping store + */ + public ReservedUnstableRoleMappingAction() { + // we don't actually need a NativeRoleMappingStore + super(null); + } + + /** + * The nonStateTransform method is the only one that uses the native store, we simply pretend + * something has called the onFailure method of the listener. 
+ */ + @Override + protected void nonStateTransform( + Collection requests, + TransformState prevState, + ActionListener listener + ) { + listener.onFailure(new IllegalStateException("Fake exception")); + } + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/reservedstate/ReservedRoleMappingActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/reservedstate/ReservedRoleMappingActionTests.java index cac7c91f73ed1..6cdca0cb3b24d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/reservedstate/ReservedRoleMappingActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/reservedstate/ReservedRoleMappingActionTests.java @@ -7,40 +7,77 @@ package org.elasticsearch.xpack.security.action.reservedstate; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.reservedstate.NonStateTransformResult; import org.elasticsearch.reservedstate.TransformState; +import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.security.action.rolemapping.ReservedRoleMappingAction; +import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; +import org.elasticsearch.xpack.security.support.SecurityIndexManager; import java.util.Collections; +import java.util.Set; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicReference; import static org.hamcrest.Matchers.containsInAnyOrder; import 
static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.nullValue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; /** * Tests that the ReservedRoleMappingAction does validation, can add and remove role mappings */ public class ReservedRoleMappingActionTests extends ESTestCase { - private TransformState processJSON(ReservedRoleMappingAction action, TransformState prevState, String json) throws Exception { try (XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, json)) { var content = action.fromXContent(parser); var state = action.transform(content, prevState); - assertThat(state.nonStateTransform(), nullValue()); - return state; + + CountDownLatch latch = new CountDownLatch(1); + AtomicReference> updatedKeys = new AtomicReference<>(); + AtomicReference error = new AtomicReference<>(); + state.nonStateTransform().accept(new ActionListener<>() { + @Override + public void onResponse(NonStateTransformResult nonStateTransformResult) { + updatedKeys.set(nonStateTransformResult.updatedKeys()); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + error.set(e); + latch.countDown(); + } + }); + + latch.await(); + if (error.get() != null) { + throw error.get(); + } + return new TransformState(state.state(), updatedKeys.get()); } } public void testValidation() { + var nativeRoleMappingStore = mockNativeRoleMappingStore(); + ClusterState state = ClusterState.builder(new ClusterName("elasticsearch")).build(); TransformState prevState = new TransformState(state, Collections.emptySet()); - ReservedRoleMappingAction action = new ReservedRoleMappingAction(); + ReservedRoleMappingAction action = new ReservedRoleMappingAction(nativeRoleMappingStore); + action.securityIndexRecovered(); + String badPolicyJSON = """ { "everyone_kibana": { @@ -60,6 +97,7 @@ public void 
testValidation() { } } }"""; + assertEquals( "failed to parse role-mapping [everyone_fleet]. missing field [rules]", expectThrows(ParsingException.class, () -> processJSON(action, prevState, badPolicyJSON)).getMessage() @@ -67,9 +105,13 @@ public void testValidation() { } public void testAddRemoveRoleMapping() throws Exception { + var nativeRoleMappingStore = mockNativeRoleMappingStore(); + ClusterState state = ClusterState.builder(new ClusterName("elasticsearch")).build(); TransformState prevState = new TransformState(state, Collections.emptySet()); - ReservedRoleMappingAction action = new ReservedRoleMappingAction(); + ReservedRoleMappingAction action = new ReservedRoleMappingAction(nativeRoleMappingStore); + action.securityIndexRecovered(); + String emptyJSON = ""; TransformState updatedState = processJSON(action, prevState, emptyJSON); @@ -105,4 +147,102 @@ public void testAddRemoveRoleMapping() throws Exception { updatedState = processJSON(action, prevState, emptyJSON); assertThat(updatedState.keys(), empty()); } + + @SuppressWarnings("unchecked") + public void testNonStateTransformWaitsOnAsyncActions() throws Exception { + var nativeRoleMappingStore = mockNativeRoleMappingStore(); + + doAnswer(invocation -> { + new Thread(() -> { + // Simulate put role mapping async action taking a while + try { + Thread.sleep(1_000); + ((ActionListener) invocation.getArgument(1)).onFailure(new IllegalStateException("err_done")); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }).start(); + + return null; + }).when(nativeRoleMappingStore).putRoleMapping(any(), any()); + + doAnswer(invocation -> { + new Thread(() -> { + // Simulate delete role mapping async action taking a while + try { + Thread.sleep(1_000); + ((ActionListener) invocation.getArgument(1)).onFailure(new IllegalStateException("err_done")); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }).start(); + + return null; + 
}).when(nativeRoleMappingStore).deleteRoleMapping(any(), any()); + + ClusterState state = ClusterState.builder(new ClusterName("elasticsearch")).build(); + TransformState updatedState = new TransformState(state, Collections.emptySet()); + ReservedRoleMappingAction action = new ReservedRoleMappingAction(nativeRoleMappingStore); + action.securityIndexRecovered(); + + String json = """ + { + "everyone_kibana": { + "enabled": true, + "roles": [ "kibana_user" ], + "rules": { "field": { "username": "*" } }, + "metadata": { + "uuid" : "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", + "_reserved": true + } + }, + "everyone_fleet": { + "enabled": true, + "roles": [ "fleet_user" ], + "rules": { "field": { "username": "*" } }, + "metadata": { + "uuid" : "a9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", + "_reserved": true + } + } + }"""; + + assertEquals( + "err_done", + expectThrows(IllegalStateException.class, () -> processJSON(action, new TransformState(state, Collections.emptySet()), json)) + .getMessage() + ); + + // Now that we've tested that we wait on putRoleMapping correctly, let it finish without exception, so we can test error on delete + doAnswer(invocation -> { + ((ActionListener) invocation.getArgument(1)).onResponse(true); + return null; + }).when(nativeRoleMappingStore).putRoleMapping(any(), any()); + + updatedState = processJSON(action, updatedState, json); + assertThat(updatedState.keys(), containsInAnyOrder("everyone_kibana", "everyone_fleet")); + + final TransformState currentState = new TransformState(updatedState.state(), updatedState.keys()); + + assertEquals("err_done", expectThrows(IllegalStateException.class, () -> processJSON(action, currentState, "")).getMessage()); + } + + @SuppressWarnings("unchecked") + private NativeRoleMappingStore mockNativeRoleMappingStore() { + final NativeRoleMappingStore nativeRoleMappingStore = spy( + new NativeRoleMappingStore(Settings.EMPTY, mock(Client.class), mock(SecurityIndexManager.class), mock(ScriptService.class)) + ); + + 
doAnswer(invocation -> { + ((ActionListener) invocation.getArgument(1)).onResponse(true); + return null; + }).when(nativeRoleMappingStore).putRoleMapping(any(), any()); + + doAnswer(invocation -> { + ((ActionListener) invocation.getArgument(1)).onResponse(true); + return null; + }).when(nativeRoleMappingStore).deleteRoleMapping(any(), any()); + + return nativeRoleMappingStore; + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingActionTests.java new file mode 100644 index 0000000000000..038e673e07862 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingActionTests.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security.action.rolemapping; + +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingRequest; +import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; + +import java.util.Collections; + +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.mockito.Mockito.mock; + +public class TransportDeleteRoleMappingActionTests extends ESTestCase { + public void testReservedStateHandler() { + var store = mock(NativeRoleMappingStore.class); + TransportService transportService = new TransportService( + Settings.EMPTY, + mock(Transport.class), + mock(ThreadPool.class), + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + x -> null, + null, + Collections.emptySet() + ); + var action = new TransportDeleteRoleMappingAction(mock(ActionFilters.class), transportService, mock(ClusterService.class), store); + + assertEquals(ReservedRoleMappingAction.NAME, action.reservedStateHandlerName().get()); + + var deleteRequest = new DeleteRoleMappingRequest(); + deleteRequest.setName("kibana_all"); + assertThat(action.modifiedKeys(deleteRequest), containsInAnyOrder("kibana_all")); + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java index 6f789a10a3a6c..58a8e8e3d4751 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java @@ -9,12 +9,16 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; @@ -29,6 +33,7 @@ import static org.hamcrest.Matchers.aMapWithSize; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.iterableWithSize; @@ -55,7 +60,7 @@ public void setupMocks() { null, Collections.emptySet() ); - action = new TransportPutRoleMappingAction(mock(ActionFilters.class), transportService, store); + action = new TransportPutRoleMappingAction(mock(ActionFilters.class), transportService, mock(ClusterService.class), store); requestRef = new AtomicReference<>(null); @@ -94,7 +99,39 @@ private PutRoleMappingResponse put(String name, FieldExpression expression, Stri request.setMetadata(metadata); request.setEnabled(true); final PlainActionFuture future = new PlainActionFuture<>(); - action.doExecute(mock(Task.class), request, future); 
+ action.doExecuteProtected(mock(Task.class), request, future); return future.get(); } + + public void testReservedStateHandler() throws Exception { + assertEquals(ReservedRoleMappingAction.NAME, action.reservedStateHandlerName().get()); + String json = """ + { + "everyone_kibana": { + "enabled": true, + "roles": [ "kibana_user" ], + "rules": { "field": { "username": "*" } }, + "metadata": { + "uuid" : "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7" + } + }, + "everyone_fleet": { + "enabled": true, + "roles": [ "fleet_user" ], + "rules": { "field": { "username": "*" } }, + "metadata": { + "uuid" : "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7" + } + } + }"""; + + try (XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, json)) { + ReservedRoleMappingAction roleMappingAction = new ReservedRoleMappingAction(store); + var parsedResult = roleMappingAction.fromXContent(parser); + + for (var mapping : parsedResult) { + assertThat(action.modifiedKeys(PutRoleMappingRequest.fromMapping(mapping)), containsInAnyOrder(mapping.getName())); + } + } + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java index 107f7c0632ea7..7752b85c6345c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java @@ -103,7 +103,7 @@ import org.elasticsearch.xpack.core.security.authc.support.AuthenticationContextSerializer; import org.elasticsearch.xpack.core.security.authc.support.Hasher; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper; import 
org.elasticsearch.xpack.core.security.authz.RoleRestrictionTests; import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissionGroup; import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions; @@ -1857,6 +1857,7 @@ public void testApiKeyDocCache() throws IOException, ExecutionException, Interru ApiKeyCredentials apiKeyCredentials3 = getApiKeyCredentials(docId3, apiKey3, type); final List keyRoles = List.of( RoleDescriptor.parserBuilder() + .allowRestriction(true) .allow2xFormat(true) .build() .parse("key-role", new BytesArray("{\"cluster\":[\"monitor\"]}"), XContentType.JSON) @@ -2348,12 +2349,12 @@ public void testMaybeBuildUpdatedDocument() throws IOException { final ApiKey.Type type = randomFrom(ApiKey.Type.values()); final Set oldUserRoles = type == ApiKey.Type.CROSS_CLUSTER ? Set.of() - : randomSet(0, 3, RoleDescriptorTests::randomRoleDescriptor); + : randomSet(0, 3, () -> RoleDescriptorTestHelper.builder().allowReservedMetadata(true).build()); final List oldKeyRoles; if (type == ApiKey.Type.CROSS_CLUSTER) { oldKeyRoles = List.of(CrossClusterApiKeyRoleDescriptorBuilder.parse(randomCrossClusterApiKeyAccessField()).build()); } else { - oldKeyRoles = randomList(3, RoleDescriptorTests::randomRoleDescriptor); + oldKeyRoles = randomList(3, () -> RoleDescriptorTestHelper.builder().allowReservedMetadata(true).build()); } final long now = randomMillisUpToYear9999(); when(clock.instant()).thenReturn(Instant.ofEpochMilli(now)); @@ -2388,7 +2389,10 @@ public void testMaybeBuildUpdatedDocument() throws IOException { final boolean changeExpiration = randomBoolean(); final Set newUserRoles = changeUserRoles - ? randomValueOtherThan(oldUserRoles, () -> randomSet(0, 3, RoleDescriptorTests::randomRoleDescriptor)) + ? 
randomValueOtherThan( + oldUserRoles, + () -> randomSet(0, 3, () -> RoleDescriptorTestHelper.builder().allowReservedMetadata(true).build()) + ) : oldUserRoles; final List newKeyRoles; if (changeKeyRoles) { @@ -2401,7 +2405,10 @@ public void testMaybeBuildUpdatedDocument() throws IOException { } }); } else { - newKeyRoles = randomValueOtherThan(oldKeyRoles, () -> randomList(0, 3, RoleDescriptorTests::randomRoleDescriptor)); + newKeyRoles = randomValueOtherThan( + oldKeyRoles, + () -> randomList(0, 3, () -> RoleDescriptorTestHelper.builder().allowReservedMetadata(true).build()) + ); } } else { newKeyRoles = randomBoolean() ? oldKeyRoles : null; @@ -2582,7 +2589,16 @@ public void testGetApiKeyMetadata() throws IOException { public void testMaybeRemoveRemoteIndicesPrivilegesWithUnsupportedVersion() { final String apiKeyId = randomAlphaOfLengthBetween(5, 8); final Set userRoleDescriptors = Set.copyOf( - randomList(2, 5, () -> RoleDescriptorTests.randomRoleDescriptor(randomBoolean(), randomBoolean(), randomBoolean(), false)) + randomList( + 2, + 5, + () -> RoleDescriptorTestHelper.builder() + .allowReservedMetadata(randomBoolean()) + .allowRemoteIndices(randomBoolean()) + .allowRestriction(randomBoolean()) + .allowRemoteClusters(false) + .build() + ) ); // Selecting random unsupported version. @@ -2615,11 +2631,7 @@ public void testMaybeRemoveRemoteIndicesPrivilegesWithUnsupportedVersion() { public void testMaybeRemoveRemoteClusterPrivilegesWithUnsupportedVersion() { final String apiKeyId = randomAlphaOfLengthBetween(5, 8); final Set userRoleDescriptors = Set.copyOf( - randomList( - 2, - 5, - () -> RoleDescriptorTests.randomRoleDescriptor(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean()) - ) + randomList(2, 5, () -> RoleDescriptorTestHelper.builder().allowRemoteClusters(true).build()) ); // Selecting random unsupported version. 
@@ -2931,7 +2943,12 @@ public void testValidateOwnerUserRoleDescriptorsWithWorkflowsRestriction() { final List requestRoleDescriptors = randomList( 0, 1, - () -> RoleDescriptorTests.randomRoleDescriptor(randomBoolean(), false, randomBoolean(), false) + () -> RoleDescriptorTestHelper.builder() + .allowReservedMetadata(randomBoolean()) + .allowRemoteIndices(false) + .allowRestriction(randomBoolean()) + .allowRemoteClusters(false) + .build() ); final AbstractCreateApiKeyRequest createRequest = mock(AbstractCreateApiKeyRequest.class); @@ -2959,34 +2976,23 @@ private static RoleDescriptor randomRoleDescriptorWithRemotePrivileges() { return new RoleDescriptor( randomAlphaOfLengthBetween(3, 90), randomSubsetOf(ClusterPrivilegeResolver.names()).toArray(String[]::new), - RoleDescriptorTests.randomIndicesPrivileges(0, 3), - RoleDescriptorTests.randomApplicationPrivileges(), - RoleDescriptorTests.randomClusterPrivileges(), + RoleDescriptorTestHelper.randomIndicesPrivileges(0, 3), + RoleDescriptorTestHelper.randomApplicationPrivileges(), + RoleDescriptorTestHelper.randomClusterPrivileges(), generateRandomStringArray(5, randomIntBetween(2, 8), false, true), - RoleDescriptorTests.randomRoleDescriptorMetadata(randomBoolean()), + RoleDescriptorTestHelper.randomRoleDescriptorMetadata(randomBoolean()), Map.of(), - RoleDescriptorTests.randomRemoteIndicesPrivileges(1, 3), + RoleDescriptorTestHelper.randomRemoteIndicesPrivileges(1, 3), new RemoteClusterPermissions().addGroup( new RemoteClusterPermissionGroup(new String[] { "monitor_enrich" }, new String[] { "*" }) ), - RoleRestrictionTests.randomWorkflowsRestriction(1, 3) + RoleRestrictionTests.randomWorkflowsRestriction(1, 3), + randomAlphaOfLengthBetween(0, 10) ); } private static RoleDescriptor randomRoleDescriptorWithWorkflowsRestriction() { - return new RoleDescriptor( - randomAlphaOfLengthBetween(3, 90), - randomSubsetOf(ClusterPrivilegeResolver.names()).toArray(String[]::new), - RoleDescriptorTests.randomIndicesPrivileges(0, 3), 
- RoleDescriptorTests.randomApplicationPrivileges(), - RoleDescriptorTests.randomClusterPrivileges(), - generateRandomStringArray(5, randomIntBetween(2, 8), false, true), - RoleDescriptorTests.randomRoleDescriptorMetadata(randomBoolean()), - Map.of(), - null, - null, - RoleRestrictionTests.randomWorkflowsRestriction(1, 3) - ); + return RoleDescriptorTestHelper.builder().allowReservedMetadata(true).allowRestriction(true).allowRemoteIndices(false).build(); } public static String randomCrossClusterApiKeyAccessField() { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessAuthenticationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessAuthenticationServiceTests.java index 20555ced32bd7..7219561dcf9df 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessAuthenticationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessAuthenticationServiceTests.java @@ -164,7 +164,7 @@ public void testExceptionProcessingRequestOnInvalidCrossClusterAccessSubjectInfo // Invalid internal user AuthenticationTestHelper.builder().internal(InternalUsers.XPACK_USER).build(), new RoleDescriptorsIntersection( - new RoleDescriptor("invalid_role", new String[] { "all" }, null, null, null, null, null, null, null, null, null) + new RoleDescriptor("invalid_role", new String[] { "all" }, null, null, null, null, null, null, null, null, null, null) ) ) ); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessHeadersTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessHeadersTests.java index 664eec036832a..f567057d5b410 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessHeadersTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessHeadersTests.java @@ -19,7 +19,7 @@ import java.util.Base64; import java.util.Set; -import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests.randomUniquelyNamedRoleDescriptors; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomUniquelyNamedRoleDescriptors; import static org.elasticsearch.xpack.security.authc.CrossClusterAccessHeaders.CROSS_CLUSTER_ACCESS_CREDENTIALS_HEADER_KEY; import static org.hamcrest.Matchers.equalTo; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceIntegTests.java index 08628c1a5f5af..501c0bee36264 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceIntegTests.java @@ -20,7 +20,7 @@ import org.elasticsearch.xpack.core.security.authc.AuthenticationTestHelper; import org.elasticsearch.xpack.core.security.authc.CrossClusterAccessSubjectInfo; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper; import org.elasticsearch.xpack.core.security.authz.RoleDescriptorsIntersection; import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; @@ -74,7 +74,8 @@ public void testGetRoleDescriptorsIntersectionForRemoteCluster() throws IOExcept .build(), randomNonEmptySubsetOf(List.of(concreteClusterAlias, "*")).toArray(new String[0]) ) }, - null, // TODO: add tests here + null, + null, null ) ); @@ 
-133,7 +134,13 @@ public void testCrossClusterAccessWithInvalidRoleDescriptors() { new RoleDescriptorsIntersection( randomValueOtherThanMany( rd -> false == rd.hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(), - () -> RoleDescriptorTests.randomRoleDescriptor() + () -> RoleDescriptorTestHelper.builder() + .allowReservedMetadata(randomBoolean()) + .allowRemoteIndices(randomBoolean()) + .allowRestriction(randomBoolean()) + .allowDescription(randomBoolean()) + .allowRemoteClusters(randomBoolean()) + .build() ) ) ); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/LoadAuthorizedIndicesTimeCheckerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/LoadAuthorizedIndicesTimeCheckerTests.java index e06f6f212c687..8295f028588cc 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/LoadAuthorizedIndicesTimeCheckerTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/LoadAuthorizedIndicesTimeCheckerTests.java @@ -12,7 +12,6 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.TransportSearchAction; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; @@ -195,7 +194,6 @@ private void testLogging( ); final MockLogAppender mockAppender = new MockLogAppender(); try (var ignored = mockAppender.capturing(timerLogger.getName())) { - Loggers.addAppender(timerLogger, mockAppender); mockAppender.addExpectation(expectation); checker.accept(List.of()); mockAppender.assertAllExpectationsMatched(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java index 1923d4d86dc71..d71c2b0d19074 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java @@ -74,7 +74,7 @@ import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.ApplicationResourcePrivileges; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper; import org.elasticsearch.xpack.core.security.authz.RoleDescriptorsIntersection; import org.elasticsearch.xpack.core.security.authz.permission.ApplicationPermission; import org.elasticsearch.xpack.core.security.authz.permission.ClusterPermission; @@ -199,7 +199,13 @@ public void testResolveAuthorizationInfoForEmptyRestrictedRolesWithAuthenticatio @SuppressWarnings("unchecked") final var listener = (ActionListener>) invocation.getArgument(1); final Supplier randomRoleSupplier = () -> Role.buildFromRoleDescriptor( - RoleDescriptorTests.randomRoleDescriptor(randomBoolean(), false, randomBoolean(), false), + RoleDescriptorTestHelper.builder() + .allowReservedMetadata(randomBoolean()) + .allowRemoteIndices(false) + .allowRestriction(randomBoolean()) + .allowDescription(randomBoolean()) + .allowRemoteClusters(false) + .build(), new FieldPermissionsCache(Settings.EMPTY), RESTRICTED_INDICES, List.of() diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java index e039f0c66eaeb..fd32bde0f3c53 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java @@ -64,7 +64,7 @@ import org.elasticsearch.xpack.core.security.authc.Subject; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper; import org.elasticsearch.xpack.core.security.authz.RoleDescriptorsIntersection; import org.elasticsearch.xpack.core.security.authz.accesscontrol.DocumentSubsetBitsetCache; import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl; @@ -959,7 +959,8 @@ public ClusterPermission.Builder buildPermission(ClusterPermission.Builder build RoleDescriptor.RemoteIndicesPrivileges.builder("remote-*", "remote").indices("abc-*", "xyz-*").privileges("read").build(), RoleDescriptor.RemoteIndicesPrivileges.builder("remote-*").indices("remote-idx-1-*").privileges("read").build(), }, getValidRemoteClusterPermissions(new String[] { "remote-*" }), - null + null, + randomAlphaOfLengthBetween(0, 20) ); ConfigurableClusterPrivilege ccp2 = new MockConfigurableClusterPrivilege() { @@ -988,7 +989,8 @@ public ClusterPermission.Builder buildPermission(ClusterPermission.Builder build RoleDescriptor.RemoteIndicesPrivileges.builder("*").indices("remote-idx-2-*").privileges("read").build(), RoleDescriptor.RemoteIndicesPrivileges.builder("remote-*").indices("remote-idx-3-*").privileges("read").build() }, null, - null + null, + randomAlphaOfLengthBetween(0, 20) ); FieldPermissionsCache cache = new FieldPermissionsCache(Settings.EMPTY); @@ -1100,7 +1102,15 @@ public void testBuildRoleWithSingleRemoteClusterDefinition() { } public void 
testBuildRoleFromDescriptorsWithSingleRestriction() { - Role role = buildRole(RoleDescriptorTests.randomRoleDescriptor(randomBoolean(), randomBoolean(), true, randomBoolean())); + Role role = buildRole( + RoleDescriptorTestHelper.builder() + .allowReservedMetadata(randomBoolean()) + .allowRemoteIndices(randomBoolean()) + .allowRestriction(true) + .allowDescription(randomBoolean()) + .allowRemoteClusters(randomBoolean()) + .build() + ); assertThat(role.hasWorkflowsRestriction(), equalTo(true)); } @@ -1108,8 +1118,20 @@ public void testBuildRoleFromDescriptorsWithViolationOfRestrictionValidation() { var e = expectThrows( IllegalArgumentException.class, () -> buildRole( - RoleDescriptorTests.randomRoleDescriptor(randomBoolean(), randomBoolean(), true, randomBoolean()), - RoleDescriptorTests.randomRoleDescriptor(randomBoolean(), randomBoolean(), true, randomBoolean()) + RoleDescriptorTestHelper.builder() + .allowReservedMetadata(randomBoolean()) + .allowRemoteIndices(randomBoolean()) + .allowRestriction(true) + .allowDescription(randomBoolean()) + .allowRemoteClusters(randomBoolean()) + .build(), + RoleDescriptorTestHelper.builder() + .allowReservedMetadata(randomBoolean()) + .allowRemoteIndices(randomBoolean()) + .allowRestriction(true) + .allowDescription(randomBoolean()) + .allowRemoteClusters(randomBoolean()) + .build() ) ); assertThat(e.getMessage(), containsString("more than one role descriptor with restriction is not allowed")); @@ -1117,9 +1139,27 @@ public void testBuildRoleFromDescriptorsWithViolationOfRestrictionValidation() { e = expectThrows( IllegalArgumentException.class, () -> buildRole( - RoleDescriptorTests.randomRoleDescriptor(randomBoolean(), randomBoolean(), true, randomBoolean()), - RoleDescriptorTests.randomRoleDescriptor(randomBoolean(), randomBoolean(), false, randomBoolean()), - RoleDescriptorTests.randomRoleDescriptor(randomBoolean(), randomBoolean(), false, randomBoolean()) + RoleDescriptorTestHelper.builder() + 
.allowReservedMetadata(randomBoolean()) + .allowRemoteIndices(randomBoolean()) + .allowRestriction(true) + .allowDescription(randomBoolean()) + .allowRemoteClusters(randomBoolean()) + .build(), + RoleDescriptorTestHelper.builder() + .allowReservedMetadata(randomBoolean()) + .allowRemoteIndices(randomBoolean()) + .allowRestriction(false) + .allowDescription(randomBoolean()) + .allowRemoteClusters(randomBoolean()) + .build(), + RoleDescriptorTestHelper.builder() + .allowReservedMetadata(randomBoolean()) + .allowRemoteIndices(randomBoolean()) + .allowRestriction(false) + .allowDescription(randomBoolean()) + .allowRemoteClusters(randomBoolean()) + .build() ) ); assertThat(e.getMessage(), containsString("combining role descriptors with and without restriction is not allowed")); @@ -2145,6 +2185,7 @@ public void testGetRoleForCrossClusterAccessAuthentication() throws Exception { null, null, null, + null, null ) ) @@ -3089,11 +3130,11 @@ private RoleDescriptor roleDescriptorWithIndicesPrivileges( final RoleDescriptor.RemoteIndicesPrivileges[] rips, final IndicesPrivileges[] ips ) { - return new RoleDescriptor(name, null, ips, null, null, null, null, null, rips, null, null); + return new RoleDescriptor(name, null, ips, null, null, null, null, null, rips, null, null, null); } private RoleDescriptor roleDescriptorWithRemoteClusterPrivileges(final String name, RemoteClusterPermissions remoteClusterPermissions) { - return new RoleDescriptor(name, null, null, null, null, null, null, null, null, remoteClusterPermissions, null); + return new RoleDescriptor(name, null, null, null, null, null, null, null, null, remoteClusterPermissions, null, null); } private RemoteClusterPermissions getValidRemoteClusterPermissions(String[] aliases) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java index 
3d30a3534d422..0a2c40d2a257a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java @@ -110,7 +110,7 @@ public void testParseFile() throws Exception { new FileRoleValidator.Default() ); assertThat(roles, notNullValue()); - assertThat(roles.size(), is(10)); + assertThat(roles.size(), is(11)); RoleDescriptor descriptor = roles.get("role1"); assertNotNull(descriptor); @@ -286,6 +286,18 @@ public void testParseFile() throws Exception { assertThat(group.getQuery(), notNullValue()); assertThat(roles.get("role_query_invalid"), nullValue()); + + descriptor = roles.get("role_with_description"); + assertNotNull(descriptor); + assertThat(descriptor.getDescription(), is(equalTo("Allows all security-related operations!"))); + role = Role.buildFromRoleDescriptor(descriptor, new FieldPermissionsCache(Settings.EMPTY), restrictedIndices); + assertThat(role, notNullValue()); + assertThat(role.names(), equalTo(new String[] { "role_with_description" })); + assertThat(role.cluster(), notNullValue()); + assertThat(role.cluster().privileges(), equalTo(Set.of(ClusterPrivilegeResolver.MANAGE_SECURITY))); + assertThat(role.indices(), is(IndicesPermission.NONE)); + assertThat(role.runAs(), is(RunAsPermission.NONE)); + } public void testParseFileWithRemoteIndicesAndCluster() throws IllegalAccessException, IOException { @@ -395,7 +407,7 @@ public void testParseFileWithFLSAndDLSDisabled() throws Exception { new FileRoleValidator.Default() ); assertThat(roles, notNullValue()); - assertThat(roles.size(), is(7)); + assertThat(roles.size(), is(8)); assertThat(roles.get("role_fields"), nullValue()); assertThat(roles.get("role_query"), nullValue()); assertThat(roles.get("role_query_fields"), nullValue()); @@ -452,7 +464,7 @@ public void testParseFileWithFLSAndDLSUnlicensed() throws Exception { new FileRoleValidator.Default() 
); assertThat(roles, notNullValue()); - assertThat(roles.size(), is(10)); + assertThat(roles.size(), is(11)); assertNotNull(roles.get("role_fields")); assertNotNull(roles.get("role_query")); assertNotNull(roles.get("role_query_fields")); @@ -664,7 +676,7 @@ public void testThatInvalidRoleDefinitions() throws Exception { assertThat(role, notNullValue()); assertThat(role.names(), equalTo(new String[] { "valid_role" })); - assertThat(entries, hasSize(7)); + assertThat(entries, hasSize(8)); assertThat( entries.get(0), startsWith("invalid role definition [fóóbár] in roles file [" + path.toAbsolutePath() + "]. invalid role name") @@ -675,6 +687,10 @@ public void testThatInvalidRoleDefinitions() throws Exception { assertThat(entries.get(4), startsWith("failed to parse role [role4]")); assertThat(entries.get(5), startsWith("failed to parse indices privileges for role [role5]")); assertThat(entries.get(6), startsWith("failed to parse role [role6]. unexpected field [restriction]")); + assertThat( + entries.get(7), + startsWith("invalid role definition [role7] in roles file [" + path.toAbsolutePath() + "]. invalid description") + ); } public void testThatRoleNamesDoesNotResolvePermissions() throws Exception { @@ -683,8 +699,8 @@ public void testThatRoleNamesDoesNotResolvePermissions() throws Exception { List events = CapturingLogger.output(logger.getName(), Level.ERROR); events.clear(); Set roleNames = FileRolesStore.parseFileForRoleNames(path, logger); - assertThat(roleNames.size(), is(7)); - assertThat(roleNames, containsInAnyOrder("valid_role", "role1", "role2", "role3", "role4", "role5", "role6")); + assertThat(roleNames.size(), is(8)); + assertThat(roleNames, containsInAnyOrder("valid_role", "role1", "role2", "role3", "role4", "role5", "role6", "role7")); assertThat(events, hasSize(1)); assertThat( @@ -746,7 +762,7 @@ public void testUsageStats() throws Exception { Map usageStats = store.usageStats(); - assertThat(usageStats.get("size"), is(flsDlsEnabled ? 
10 : 7)); + assertThat(usageStats.get("size"), is(flsDlsEnabled ? 11 : 8)); assertThat(usageStats.get("remote_indices"), is(1L)); assertThat(usageStats.get("remote_cluster"), is(1L)); assertThat(usageStats.get("fls"), is(flsDlsEnabled)); @@ -781,7 +797,7 @@ public void testExists() throws Exception { new FileRoleValidator.Default() ); assertThat(roles, notNullValue()); - assertThat(roles.size(), is(10)); + assertThat(roles.size(), is(11)); for (var role : roles.keySet()) { assertThat(store.exists(role), is(true)); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java index 35591f99727f2..9d83d5f5c60ed 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java @@ -50,7 +50,6 @@ import org.elasticsearch.xpack.core.security.action.role.PutRoleRequest; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests; import org.elasticsearch.xpack.core.security.authz.RoleRestrictionTests; import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissionGroup; import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions; @@ -76,6 +75,10 @@ import static org.elasticsearch.transport.RemoteClusterPortSettings.TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY; import static org.elasticsearch.xpack.core.security.SecurityField.DOCUMENT_LEVEL_SECURITY_FEATURE; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomApplicationPrivileges; +import static 
org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomClusterPrivileges; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomRemoteIndicesPrivileges; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomRoleDescriptorMetadata; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MAIN_ALIAS; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.contains; @@ -130,14 +133,15 @@ public void testRoleDescriptorWithFlsDlsLicensing() throws IOException { randomSubsetOf(ClusterPrivilegeResolver.names()).toArray(String[]::new), new IndicesPrivileges[] { IndicesPrivileges.builder().privileges("READ").indices("*").grantedFields("*").deniedFields("foo").build() }, - RoleDescriptorTests.randomApplicationPrivileges(), - RoleDescriptorTests.randomClusterPrivileges(), + randomApplicationPrivileges(), + randomClusterPrivileges(), generateRandomStringArray(5, randomIntBetween(2, 8), true, true), - RoleDescriptorTests.randomRoleDescriptorMetadata(ESTestCase.randomBoolean()), + randomRoleDescriptorMetadata(ESTestCase.randomBoolean()), null, - RoleDescriptorTests.randomRemoteIndicesPrivileges(1, 2), + randomRemoteIndicesPrivileges(1, 2), null, - null + null, + randomAlphaOfLengthBetween(0, 20) ); assertFalse(flsRole.getTransientMetadata().containsKey("unlicensed_features")); @@ -147,14 +151,15 @@ public void testRoleDescriptorWithFlsDlsLicensing() throws IOException { "dls", randomSubsetOf(ClusterPrivilegeResolver.names()).toArray(String[]::new), new IndicesPrivileges[] { IndicesPrivileges.builder().indices("*").privileges("READ").query(matchAllBytes).build() }, - RoleDescriptorTests.randomApplicationPrivileges(), - RoleDescriptorTests.randomClusterPrivileges(), + randomApplicationPrivileges(), + randomClusterPrivileges(), generateRandomStringArray(5, randomIntBetween(2, 8), true, true), - 
RoleDescriptorTests.randomRoleDescriptorMetadata(ESTestCase.randomBoolean()), + randomRoleDescriptorMetadata(ESTestCase.randomBoolean()), null, - RoleDescriptorTests.randomRemoteIndicesPrivileges(1, 2), + randomRemoteIndicesPrivileges(1, 2), null, - null + null, + randomAlphaOfLengthBetween(0, 20) ); assertFalse(dlsRole.getTransientMetadata().containsKey("unlicensed_features")); @@ -169,14 +174,15 @@ public void testRoleDescriptorWithFlsDlsLicensing() throws IOException { .deniedFields("foo") .query(matchAllBytes) .build() }, - RoleDescriptorTests.randomApplicationPrivileges(), - RoleDescriptorTests.randomClusterPrivileges(), + randomApplicationPrivileges(), + randomClusterPrivileges(), generateRandomStringArray(5, randomIntBetween(2, 8), true, true), - RoleDescriptorTests.randomRoleDescriptorMetadata(ESTestCase.randomBoolean()), + randomRoleDescriptorMetadata(ESTestCase.randomBoolean()), null, - RoleDescriptorTests.randomRemoteIndicesPrivileges(1, 2), + randomRemoteIndicesPrivileges(1, 2), null, - null + null, + randomAlphaOfLengthBetween(0, 20) ); assertFalse(flsDlsRole.getTransientMetadata().containsKey("unlicensed_features")); @@ -184,14 +190,15 @@ public void testRoleDescriptorWithFlsDlsLicensing() throws IOException { "no_fls_dls", randomSubsetOf(ClusterPrivilegeResolver.names()).toArray(String[]::new), new IndicesPrivileges[] { IndicesPrivileges.builder().indices("*").privileges("READ").build() }, - RoleDescriptorTests.randomApplicationPrivileges(), - RoleDescriptorTests.randomClusterPrivileges(), + randomApplicationPrivileges(), + randomClusterPrivileges(), generateRandomStringArray(5, randomIntBetween(2, 8), false, true), - RoleDescriptorTests.randomRoleDescriptorMetadata(ESTestCase.randomBoolean()), + randomRoleDescriptorMetadata(ESTestCase.randomBoolean()), null, - RoleDescriptorTests.randomRemoteIndicesPrivileges(1, 2), + randomRemoteIndicesPrivileges(1, 2), null, - null + null, + randomAlphaOfLengthBetween(0, 20) ); 
assertFalse(noFlsDlsRole.getTransientMetadata().containsKey("unlicensed_features")); @@ -281,14 +288,15 @@ public void testTransformingRoleWithRestrictionFails() throws IOException { : "{ \"match_all\": {} }" ) .build() }, - RoleDescriptorTests.randomApplicationPrivileges(), - RoleDescriptorTests.randomClusterPrivileges(), + randomApplicationPrivileges(), + randomClusterPrivileges(), generateRandomStringArray(5, randomIntBetween(2, 8), true, true), - RoleDescriptorTests.randomRoleDescriptorMetadata(ESTestCase.randomBoolean()), + randomRoleDescriptorMetadata(ESTestCase.randomBoolean()), null, - RoleDescriptorTests.randomRemoteIndicesPrivileges(1, 2), + randomRemoteIndicesPrivileges(1, 2), null, - RoleRestrictionTests.randomWorkflowsRestriction(1, 2) + RoleRestrictionTests.randomWorkflowsRestriction(1, 2), + randomAlphaOfLengthBetween(0, 20) ); XContentBuilder builder = roleWithRestriction.toXContent( @@ -463,6 +471,7 @@ void innerPutRole(final PutRoleRequest request, final RoleDescriptor role, final null, remoteIndicesPrivileges, remoteClusterPermissions, + null, null ); PlainActionFuture future = new PlainActionFuture<>(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java index ca974e4e1e723..f076dc24e5d5b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java @@ -1483,6 +1483,7 @@ private static ApiKey createApiKeyForOwner(String apiKeyId, String username, Str null, null, null, + null, null ) ), diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java index 810ef4056fd99..577a8eb9f698e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java @@ -42,8 +42,8 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests.randomCrossClusterAccessRoleDescriptor; -import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests.randomUniquelyNamedRoleDescriptors; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomCrossClusterAccessRoleDescriptor; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomUniquelyNamedRoleDescriptors; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.emptyIterable; import static org.hamcrest.Matchers.is; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/CacheInvalidatorRegistryTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/CacheInvalidatorRegistryTests.java index 8849edca70d68..6b60336276c35 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/CacheInvalidatorRegistryTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/CacheInvalidatorRegistryTests.java @@ -12,13 +12,13 @@ import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.security.support.CacheInvalidatorRegistry.CacheInvalidator; +import org.elasticsearch.xpack.security.support.SecuritySystemIndices.SecurityMainIndexMappingVersion; import org.junit.Before; import java.time.Instant; import java.util.List; 
import java.util.Set; -import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.INTERNAL_MAIN_INDEX_MAPPINGS_FORMAT; import static org.hamcrest.Matchers.containsString; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; @@ -61,7 +61,7 @@ public void testSecurityIndexStateChangeWillInvalidateAllRegisteredInvalidators( true, true, true, - new SystemIndexDescriptor.MappingsVersion(INTERNAL_MAIN_INDEX_MAPPINGS_FORMAT, 0), + new SystemIndexDescriptor.MappingsVersion(SecurityMainIndexMappingVersion.latest().id(), 0), ".security", ClusterHealthStatus.GREEN, IndexMetadata.State.OPEN, diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java index 2abeeb3fa040b..a7c5c616cf5bf 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java @@ -50,6 +50,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.security.test.TestRestrictedIndices; +import org.elasticsearch.xpack.security.support.SecuritySystemIndices.SecurityMainIndexMappingVersion; import org.elasticsearch.xpack.security.test.SecurityTestUtils; import org.hamcrest.Matchers; import org.junit.Before; @@ -63,7 +64,6 @@ import java.util.function.BiConsumer; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.INTERNAL_MAIN_INDEX_MAPPINGS_FORMAT; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; @@ -389,7 +389,10 @@ public void testCanUpdateIndexMappings() { // 
Ensure that the mappings for the index are out-of-date, so that the security index manager will // attempt to update them. - int previousVersion = INTERNAL_MAIN_INDEX_MAPPINGS_FORMAT - 1; + int previousVersion = randomValueOtherThanMany( + v -> v.onOrAfter(SecurityMainIndexMappingVersion.latest()), + () -> randomFrom(SecurityMainIndexMappingVersion.values()) + ).id(); // State recovered with index, with mappings with a prior version ClusterState.Builder clusterStateBuilder = createClusterState( @@ -419,11 +422,15 @@ public void testCannotUpdateIndexMappingsWhenMinMappingVersionTooLow() { // Hard-code a failure here. doReturn("Nope").when(descriptorSpy).getMinimumMappingsVersionMessage(anyString()); - doReturn(null).when(descriptorSpy).getDescriptorCompatibleWith(eq(new SystemIndexDescriptor.MappingsVersion(1, 0))); + doReturn(null).when(descriptorSpy) + .getDescriptorCompatibleWith(eq(new SystemIndexDescriptor.MappingsVersion(SecurityMainIndexMappingVersion.latest().id(), 0))); // Ensure that the mappings for the index are out-of-date, so that the security index manager will // attempt to update them. 
- int previousVersion = INTERNAL_MAIN_INDEX_MAPPINGS_FORMAT - 1; + int previousVersion = randomValueOtherThanMany( + v -> v.onOrAfter(SecurityMainIndexMappingVersion.latest()), + () -> randomFrom(SecurityMainIndexMappingVersion.values()) + ).id(); ClusterState.Builder clusterStateBuilder = createClusterState( TestRestrictedIndices.INTERNAL_SECURITY_MAIN_INDEX_7, @@ -457,7 +464,7 @@ public void testNoUpdateWhenIndexMappingsVersionNotBumped() { SecuritySystemIndices.SECURITY_MAIN_ALIAS, SecuritySystemIndices.INTERNAL_MAIN_INDEX_FORMAT, IndexMetadata.State.OPEN, - getMappings(INTERNAL_MAIN_INDEX_MAPPINGS_FORMAT) + getMappings(SecurityMainIndexMappingVersion.latest().id()) ); manager.clusterChanged(event(markShardsAvailable(clusterStateBuilder))); manager.prepareIndexIfNeededThenExecute(prepareException::set, () -> prepareRunnableCalled.set(true)); @@ -480,7 +487,7 @@ public void testNoUpdateWhenNoIndexMappingsVersionInClusterState() { SecuritySystemIndices.SECURITY_MAIN_ALIAS, SecuritySystemIndices.INTERNAL_MAIN_INDEX_FORMAT, IndexMetadata.State.OPEN, - getMappings(INTERNAL_MAIN_INDEX_MAPPINGS_FORMAT), + getMappings(SecurityMainIndexMappingVersion.latest().id()), Map.of() ); manager.clusterChanged(event(markShardsAvailable(clusterStateBuilder))); @@ -628,7 +635,7 @@ private static ClusterState.Builder createClusterState( format, state, mappings, - Map.of(indexName, new SystemIndexDescriptor.MappingsVersion(1, 0)) + Map.of(indexName, new SystemIndexDescriptor.MappingsVersion(SecurityMainIndexMappingVersion.latest().id(), 0)) ); } @@ -689,7 +696,7 @@ private static IndexMetadata.Builder getIndexMetadata( } private static String getMappings() { - return getMappings(INTERNAL_MAIN_INDEX_MAPPINGS_FORMAT); + return getMappings(SecurityMainIndexMappingVersion.latest().id()); } private static String getMappings(Integer version) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityMainIndexMappingVersionTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityMainIndexMappingVersionTests.java new file mode 100644 index 0000000000000..7550b96fdf4f9 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityMainIndexMappingVersionTests.java @@ -0,0 +1,35 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security.support; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.security.support.SecuritySystemIndices.SecurityMainIndexMappingVersion; + +import java.util.HashMap; +import java.util.Map; + +public class SecurityMainIndexMappingVersionTests extends ESTestCase { + + public void testVersionIdUniqueness() { + Map ids = new HashMap<>(); + for (var version : SecurityMainIndexMappingVersion.values()) { + var existing = ids.put(version.id(), version); + if (existing != null) { + fail( + "duplicate ID [" + + version.id() + + "] definition found in SecurityMainIndexMappingVersion for [" + + version + + "] and [" + + existing + + "]" + ); + } + } + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java index 473cf5ee387b8..00f170a4cf8d8 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java @@ -88,7 +88,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_PROFILE_ORIGIN; import static 
org.elasticsearch.xpack.core.ClientHelper.TRANSFORM_ORIGIN; import static org.elasticsearch.xpack.core.security.authc.CrossClusterAccessSubjectInfo.CROSS_CLUSTER_ACCESS_SUBJECT_INFO_HEADER_KEY; -import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests.randomUniquelyNamedRoleDescriptors; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomUniquelyNamedRoleDescriptors; import static org.elasticsearch.xpack.security.authc.CrossClusterAccessHeaders.CROSS_CLUSTER_ACCESS_CREDENTIALS_HEADER_KEY; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; diff --git a/x-pack/plugin/security/src/test/resources/META-INF/services/org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider b/x-pack/plugin/security/src/test/resources/META-INF/services/org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider index 3d17572429bac..77c38d302d9c9 100644 --- a/x-pack/plugin/security/src/test/resources/META-INF/services/org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider +++ b/x-pack/plugin/security/src/test/resources/META-INF/services/org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider @@ -6,3 +6,4 @@ # org.elasticsearch.xpack.security.LocalReservedSecurityStateHandlerProvider +org.elasticsearch.xpack.security.LocalReservedUnstableSecurityStateHandlerProvider diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/invalid_roles.yml b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/invalid_roles.yml index 21e9d87189cf0..fa0addce53035 100644 --- a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/invalid_roles.yml +++ b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/invalid_roles.yml @@ -58,3 +58,6 @@ role6: workflows: - workflow1 - workflow2 +role7: + description: + 
"tJywjBJUSwXDiRtpoJxEotFupzVVUIfwnoFMFiTwRoFiURksYxmQOaoykJIYwFvNpiGnfFePFUrCPTEbDXPkXQudrpBikHSQmdqvNjxXvktEghvvIQuzZitqwKjmnQvqlDfqYXSccRiqEslDdkjdcXPmSSggJMqrXmkdNtwBItbjLpHdNPuSgVYLwcBCblGHysaXJFcZHLFbqhirxNGTkENBMpzTXjsMXwSEnqKUZtDSckxGUyFfKXCvumgJkjLrrBvSxjnanuHpmXzUlFGEHqqxJjAstxSGKnPPzzsuZAlsrLTAzAdpBOnLDMdOBDyAweiCLzIvyfwuTWcOMGRWItPUdEdqcLjlYRhOgpTuWsDQcrCYnlIuiEpBodlGwaCDYnppZWmBDMyQCSPSTCwjilXtqmTuwuxwfyCNLbqNWjzKOPhEPsKjuvNpexRhleNgMqrDpmhWOZzRZMDnLYIjNJZKdsgErOoVuyUlJAKnJlpevIZUjXDIyybxXaaFGztppkpMAOVLFHjbiJuGVDdpyBHwxlyvPJOgVeViYZNiKEOWmaIypbuWenBnYRvSdYiHHaSLwuNILDIrAqoNBiFBdMhuLvTKOkepMYFcbXpYqLWYmtPYIVXGfHPUgmYhhsfIatqwhhnefxfTeqqUlVLmLcNAjiBFiiCRfiQvtvWOWJyfATrUeCVNfquIXHzHQWPWtbpeTiYTUvEPQWeeTjKpHrycLmKpsWjCLteqlutXgaeLSAvDvbvrlJZyAWflVnuzdcNxtzfcEocKsoJGOfjKXyQlxapPvOyDZYbvHYoYljYHTrEVPbMOQuwMxKPYkbyEDJuMqOtfgqVHZpsaimFmQjTlAdNOwtDTJdJhZVzgpVTWZCJRBopvQZgbIzPEJOoCVlYRhLDRARxmlrxrAMApKaZxfiMDyhMVZKXCankStqBfYSYOmtYMvkARtngxNINwAehRhDNMZoZuGTylxteKhLqFVKudMuSCpRfCxjNsanWHVvghUJYpcxildbvAhgpU" diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/roles.yml b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/roles.yml index cb956ff970800..ec0d325566127 100644 --- a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/roles.yml +++ b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/roles.yml @@ -92,3 +92,9 @@ role_remote: - 'remote-*' privileges: - "monitor_enrich" + +role_with_description: + description: + "Allows all security-related operations!" 
+ cluster: + - manage_security diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/DeleteShutdownNodeAction.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/DeleteShutdownNodeAction.java index 4f7b16380d0f8..4446e0aeae4db 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/DeleteShutdownNodeAction.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/DeleteShutdownNodeAction.java @@ -37,7 +37,7 @@ public Request(String nodeId) { } public Request(StreamInput in) throws IOException { - if (in.getTransportVersion().isPatchFrom(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX_8_13) + if (in.getTransportVersion().isPatchFrom(TransportVersions.V_8_13_4) || in.getTransportVersion().isPatchFrom(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX_8_14) || in.getTransportVersion().onOrAfter(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX)) { // effectively super(in): @@ -50,7 +50,7 @@ public Request(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().isPatchFrom(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX_8_13) + if (out.getTransportVersion().isPatchFrom(TransportVersions.V_8_13_4) || out.getTransportVersion().isPatchFrom(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX_8_14) || out.getTransportVersion().onOrAfter(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX)) { super.writeTo(out); diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/PutShutdownNodeAction.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/PutShutdownNodeAction.java index bff2b0b1793b1..8356285c10d0d 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/PutShutdownNodeAction.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/PutShutdownNodeAction.java @@ -100,7 +100,7 @@ public Request( 
@UpdateForV9 // TODO call super(in) instead of explicitly reading superclass contents once bwc no longer needed public Request(StreamInput in) throws IOException { - if (in.getTransportVersion().isPatchFrom(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX_8_13) + if (in.getTransportVersion().isPatchFrom(TransportVersions.V_8_13_4) || in.getTransportVersion().isPatchFrom(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX_8_14) || in.getTransportVersion().onOrAfter(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX)) { // effectively super(in): @@ -126,7 +126,7 @@ public Request(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().isPatchFrom(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX_8_13) + if (out.getTransportVersion().isPatchFrom(TransportVersions.V_8_13_4) || out.getTransportVersion().isPatchFrom(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX_8_14) || out.getTransportVersion().onOrAfter(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX)) { super.writeTo(out); diff --git a/x-pack/plugin/sql/qa/server/src/main/resources/filter.csv-spec b/x-pack/plugin/sql/qa/server/src/main/resources/filter.csv-spec index 1615ee3a64256..f6a6cec5dc65b 100644 --- a/x-pack/plugin/sql/qa/server/src/main/resources/filter.csv-spec +++ b/x-pack/plugin/sql/qa/server/src/main/resources/filter.csv-spec @@ -130,8 +130,7 @@ SELECT COUNT(*), TRUNCATE(emp_no, -2) t FROM test_emp WHERE 'aaabbb' RLIKE 'a{2, 1 |10100 ; -// AwaitsFix https://github.com/elastic/elasticsearch/issues/96805 -inWithCompatibleDateTypes-Ignore +inWithCompatibleDateTypes SELECT birth_date FROM test_emp WHERE birth_date IN ({d '1959-07-23'}, CAST('1959-12-25T00:00:00' AS TIMESTAMP), '1964-06-02T00:00:00.000Z') OR birth_date IS NULL ORDER BY birth_date; birth_date:ts diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/10_basic.yml index 
edc79a8ebfc9e..db4ea4e8b205d 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/10_basic.yml @@ -29,7 +29,10 @@ teardown: security.delete_role: name: "backwards_role" ignore: 404 - + - do: + security.delete_role: + name: "role_with_description" + ignore: 404 --- "Test put role api": - do: @@ -83,3 +86,21 @@ teardown: - match: { admin_role.metadata.key2: "val2" } - match: { admin_role.indices.0.names.0: "*" } - match: { admin_role.indices.0.privileges.0: "all" } + + - do: + security.put_role: + name: "role_with_description" + body: > + { + "description": "Allows all security-related operations such as CRUD operations on users and roles and cache clearing.", + "cluster": ["manage_security"] + } + - match: { role: { created: true } } + + - do: + headers: + Authorization: "Basic am9lOnMza3JpdC1wYXNzd29yZA==" + security.get_role: + name: "role_with_description" + - match: { role_with_description.cluster.0: "manage_security" } + - match: { role_with_description.description: "Allows all security-related operations such as CRUD operations on users and roles and cache clearing." 
} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/put_job.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/put_job.yml index 6560c6f470533..dd301c0a29f4f 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/put_job.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/put_job.yml @@ -94,6 +94,11 @@ setup: status: job_state: "stopped" + - do: {xpack.usage: {}} + - match: { rollup.available: true } + - match: { rollup.enabled: true } + - match: { rollup.number_of_rollup_jobs: 1 } + --- "Test put_job with existing name": diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java index 0612648078edc..b2dc04c1178e4 100644 --- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java +++ b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java @@ -43,7 +43,7 @@ public class LegacyStackTemplateRegistry extends IndexTemplateRegistry { // The stack template registry version. This number must be incremented when we make changes // to built-in templates. 
- public static final int REGISTRY_VERSION = 4; + public static final int REGISTRY_VERSION = 5; public static final String TEMPLATE_VERSION_VARIABLE = "xpack.stack.template.version"; diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java index 3930cfe6cd941..30323a1d7d363 100644 --- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java +++ b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java @@ -47,7 +47,7 @@ public class StackTemplateRegistry extends IndexTemplateRegistry { // The stack template registry version. This number must be incremented when we make changes // to built-in templates. - public static final int REGISTRY_VERSION = 9; + public static final int REGISTRY_VERSION = 10; public static final String TEMPLATE_VERSION_VARIABLE = "xpack.stack.template.version"; public static final Setting STACK_TEMPLATES_ENABLED = Setting.boolSetting( @@ -107,6 +107,7 @@ public class StackTemplateRegistry extends IndexTemplateRegistry { // Kibana reporting template /////////////////////////////////// public static final String KIBANA_REPORTING_INDEX_TEMPLATE_NAME = ".kibana-reporting"; + public static final String KIBANA_REPORTING_COMPONENT_TEMPLATE_NAME = "kibana-reporting@settings"; public StackTemplateRegistry( Settings nodeSettings, @@ -229,6 +230,13 @@ protected List getLifecyclePolicies() { REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE, ADDITIONAL_TEMPLATE_VARIABLES + ), + new IndexTemplateConfig( + KIBANA_REPORTING_COMPONENT_TEMPLATE_NAME, + "/kibana-reporting@settings.json", + REGISTRY_VERSION, + TEMPLATE_VERSION_VARIABLE, + ADDITIONAL_TEMPLATE_VARIABLES ) )) { try { diff --git a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java 
b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java index 782fe3b41ae3b..abb2d5765b128 100644 --- a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java +++ b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java @@ -429,6 +429,7 @@ public void testSameOrHigherVersionTemplateNotUpgraded() { versions.put(StackTemplateRegistry.METRICS_MAPPINGS_COMPONENT_TEMPLATE_NAME, StackTemplateRegistry.REGISTRY_VERSION); versions.put(StackTemplateRegistry.SYNTHETICS_SETTINGS_COMPONENT_TEMPLATE_NAME, StackTemplateRegistry.REGISTRY_VERSION); versions.put(StackTemplateRegistry.SYNTHETICS_MAPPINGS_COMPONENT_TEMPLATE_NAME, StackTemplateRegistry.REGISTRY_VERSION); + versions.put(StackTemplateRegistry.KIBANA_REPORTING_COMPONENT_TEMPLATE_NAME, StackTemplateRegistry.REGISTRY_VERSION); ClusterChangedEvent sameVersionEvent = createClusterChangedEvent(versions, nodes); client.setVerifier((action, request, listener) -> { if (action instanceof PutComponentTemplateAction) { @@ -484,6 +485,10 @@ public void testSameOrHigherVersionTemplateNotUpgraded() { StackTemplateRegistry.SYNTHETICS_MAPPINGS_COMPONENT_TEMPLATE_NAME, StackTemplateRegistry.REGISTRY_VERSION + randomIntBetween(1, 1000) ); + versions.put( + StackTemplateRegistry.KIBANA_REPORTING_COMPONENT_TEMPLATE_NAME, + StackTemplateRegistry.REGISTRY_VERSION + randomIntBetween(1, 1000) + ); ClusterChangedEvent higherVersionEvent = createClusterChangedEvent(versions, nodes); registry.clusterChanged(higherVersionEvent); } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java index ed0f721f5f7f0..df8c3f62034e5 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java +++ 
b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java @@ -193,7 +193,11 @@ protected void handleBulkResponse(BulkResponse bulkResponse, ActionListener listener) { }, listener::onFailure); var deducedDestIndexMappings = new SetOnce>(); - var shouldMaybeCreateDestIndexForUnattended = context.getCheckpoint() == 0 - && TransformEffectiveSettings.isUnattended(transformConfig.getSettings()); + + // if the unattended transform had not created the destination index yet, or if the destination index was deleted for any + // type of transform during the last run, then we try to create the destination index. + // This is important to create the destination index explicitly before indexing documents. Otherwise, the destination + // index aliases may be missing. + var shouldMaybeCreateDestIndex = isFirstUnattendedRun() || context.shouldRecreateDestinationIndex(); ActionListener> fieldMappingsListener = ActionListener.wrap(destIndexMappings -> { if (destIndexMappings.isEmpty() == false) { @@ -359,11 +363,12 @@ protected void onStart(long now, ActionListener listener) { // ... otherwise we fall back to index mappings deduced based on source indices this.fieldMappings = deducedDestIndexMappings.get(); } - // Since the unattended transform could not have created the destination index yet, we do it here. - // This is important to create the destination index explicitly before indexing first documents. Otherwise, the destination - // index aliases may be missing. 
- if (destIndexMappings.isEmpty() && shouldMaybeCreateDestIndexForUnattended) { - doMaybeCreateDestIndex(deducedDestIndexMappings.get(), configurationReadyListener); + + if (destIndexMappings.isEmpty() && shouldMaybeCreateDestIndex) { + doMaybeCreateDestIndex(deducedDestIndexMappings.get(), configurationReadyListener.delegateFailure((delegate, response) -> { + context.setShouldRecreateDestinationIndex(false); + delegate.onResponse(response); + })); } else { configurationReadyListener.onResponse(null); } @@ -380,7 +385,7 @@ protected void onStart(long now, ActionListener listener) { deducedDestIndexMappings.set(validationResponse.getDestIndexMappings()); if (isContinuous()) { transformsConfigManager.getTransformConfiguration(getJobId(), ActionListener.wrap(config -> { - if (transformConfig.equals(config) && fieldMappings != null && shouldMaybeCreateDestIndexForUnattended == false) { + if (transformConfig.equals(config) && fieldMappings != null && shouldMaybeCreateDestIndex == false) { logger.trace("[{}] transform config has not changed.", getJobId()); configurationReadyListener.onResponse(null); } else { @@ -415,7 +420,7 @@ protected void onStart(long now, ActionListener listener) { }, listener::onFailure); Instant instantOfTrigger = Instant.ofEpochMilli(now); - // If we are not on the initial batch checkpoint and its the first pass of whatever continuous checkpoint we are on, + // If we are not on the initial batch checkpoint and it's the first pass of whatever continuous checkpoint we are on, // we should verify if there are local changes based on the sync config. If not, do not proceed further and exit. 
if (context.getCheckpoint() > 0 && initialRun()) { checkpointProvider.sourceHasChanged(getLastCheckpoint(), ActionListener.wrap(hasChanged -> { @@ -436,8 +441,7 @@ protected void onStart(long now, ActionListener listener) { hasSourceChanged = true; listener.onFailure(failure); })); - } else if (context.getCheckpoint() == 0 && TransformEffectiveSettings.isUnattended(transformConfig.getSettings())) { - // this transform runs in unattended mode and has never run, to go on + } else if (shouldMaybeCreateDestIndex) { validate(changedSourceListener); } else { hasSourceChanged = true; @@ -447,6 +451,13 @@ protected void onStart(long now, ActionListener listener) { } } + /** + * Returns true if this transform runs in unattended mode and has never run. + */ + private boolean isFirstUnattendedRun() { + return context.getCheckpoint() == 0 && TransformEffectiveSettings.isUnattended(transformConfig.getSettings()); + } + protected void initializeFunction() { // create the function function = FunctionFactory.create(getConfig()); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinder.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinder.java index 8618b01a0440b..8bf859a020ba4 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinder.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinder.java @@ -9,6 +9,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchContextMissingException; import org.elasticsearch.tasks.TaskCancelledException; @@ -63,7 +64,7 @@ public static Throwable getFirstIrrecoverableExceptionFromBulkResponses(Collecti } if (unwrappedThrowable 
instanceof ElasticsearchException elasticsearchException) { - if (isExceptionIrrecoverable(elasticsearchException)) { + if (isExceptionIrrecoverable(elasticsearchException) && isNotIndexNotFoundException(elasticsearchException)) { return elasticsearchException; } } @@ -72,6 +73,15 @@ public static Throwable getFirstIrrecoverableExceptionFromBulkResponses(Collecti return null; } + /** + * We can safely recover from IndexNotFoundExceptions on Bulk responses. + * If the transform is running, the next checkpoint will recreate the index. + * If the transform is not running, the next start request will recreate the index. + */ + private static boolean isNotIndexNotFoundException(ElasticsearchException elasticsearchException) { + return elasticsearchException instanceof IndexNotFoundException == false; + } + public static boolean isExceptionIrrecoverable(ElasticsearchException elasticsearchException) { if (IRRECOVERABLE_REST_STATUSES.contains(elasticsearchException.status())) { diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java index fe54847af0404..eeef51bcbcb06 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java @@ -10,15 +10,17 @@ import org.apache.lucene.search.TotalHits; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; import 
org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; @@ -27,6 +29,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.index.reindex.DeleteByQueryRequest; import org.elasticsearch.script.ScriptException; @@ -35,7 +38,6 @@ import org.elasticsearch.search.profile.SearchProfileResults; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.client.NoOpClient; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.common.notifications.Level; import org.elasticsearch.xpack.core.indexing.IndexerState; @@ -75,6 +77,7 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.function.Function; import java.util.stream.Collectors; +import java.util.stream.Stream; import static java.util.Collections.singletonList; import static org.elasticsearch.xpack.core.transform.transforms.DestConfigTests.randomDestConfig; @@ -85,6 +88,7 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.matchesRegex; import static 
org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; @@ -99,8 +103,11 @@ */ public class TransformIndexerFailureHandlingTests extends ESTestCase { - private Client client; private ThreadPool threadPool; + private static final Function EMPTY_BULK_RESPONSE = bulkRequest -> new BulkResponse( + new BulkItemResponse[0], + 100 + ); static class MockedTransformIndexer extends ClientTransformIndexer { @@ -110,13 +117,13 @@ static class MockedTransformIndexer extends ClientTransformIndexer { // used for synchronizing with the test private CountDownLatch latch; + private int doProcessCount; MockedTransformIndexer( ThreadPool threadPool, ClusterService clusterService, IndexNameExpressionResolver indexNameExpressionResolver, TransformExtension transformExtension, - String executorName, IndexBasedTransformConfigManager transformsConfigManager, CheckpointProvider checkpointProvider, TransformConfig transformConfig, @@ -127,7 +134,8 @@ static class MockedTransformIndexer extends ClientTransformIndexer { TransformContext context, Function searchFunction, Function bulkFunction, - Function deleteByQueryFunction + Function deleteByQueryFunction, + int doProcessCount ) { super( threadPool, @@ -157,6 +165,7 @@ static class MockedTransformIndexer extends ClientTransformIndexer { this.searchFunction = searchFunction; this.bulkFunction = bulkFunction; this.deleteByQueryFunction = deleteByQueryFunction; + this.doProcessCount = doProcessCount; } public void initialize() { @@ -182,12 +191,7 @@ protected void doNextSearch(long waitTimeInNanos, ActionListener throw new IllegalStateException(e); } - try { - SearchResponse response = searchFunction.apply(buildSearchRequest().v2()); - nextPhase.onResponse(response); - } catch (Exception e) { - nextPhase.onFailure(e); - } + ActionListener.run(nextPhase, l -> ActionListener.respondAndRelease(l, searchFunction.apply(buildSearchRequest().v2()))); } @Override @@ -278,12 +282,22 @@ void doGetFieldMappings(ActionListener> 
fieldMappingsListene protected void persistState(TransformState state, ActionListener listener) { listener.onResponse(null); } + + @Override + protected IterationResult doProcess(SearchResponse searchResponse) { + if (doProcessCount > 0) { + doProcessCount -= 1; + // pretend that we processed 10k documents for each call + getStats().incrementNumDocuments(10_000); + return new IterationResult<>(Stream.of(new IndexRequest()), new TransformIndexerPosition(null, null), false); + } + return super.doProcess(searchResponse); + } } @Before public void setUpMocks() { threadPool = createThreadPool(); - client = new NoOpClient(threadPool); } @After @@ -325,17 +339,7 @@ public void testPageSizeAdapt() throws Exception { TransformAuditor auditor = MockTransformAuditor.createMockAuditor(); TransformContext context = new TransformContext(TransformTaskState.STARTED, "", 0, mock(TransformContext.Listener.class)); - MockedTransformIndexer indexer = createMockIndexer( - config, - state, - searchFunction, - bulkFunction, - null, - threadPool, - ThreadPool.Names.GENERIC, - auditor, - context - ); + MockedTransformIndexer indexer = createMockIndexer(config, state, searchFunction, bulkFunction, null, threadPool, auditor, context); final CountDownLatch latch = indexer.newLatch(1); indexer.start(); assertThat(indexer.getState(), equalTo(IndexerState.STARTED)); @@ -415,7 +419,6 @@ public void testDoProcessAggNullCheck() { bulkFunction, null, threadPool, - ThreadPool.Names.GENERIC, auditor, context ); @@ -476,17 +479,7 @@ public void testScriptError() throws Exception { TransformContext.Listener contextListener = createContextListener(failIndexerCalled, failureMessage); TransformContext context = new TransformContext(TransformTaskState.STARTED, "", 0, contextListener); - MockedTransformIndexer indexer = createMockIndexer( - config, - state, - searchFunction, - bulkFunction, - null, - threadPool, - ThreadPool.Names.GENERIC, - auditor, - context - ); + MockedTransformIndexer indexer = 
createMockIndexer(config, state, searchFunction, bulkFunction, null, threadPool, auditor, context); final CountDownLatch latch = indexer.newLatch(1); @@ -542,7 +535,10 @@ public void testRetentionPolicyDeleteByQueryThrowsIrrecoverable() throws Excepti ); try { AtomicReference state = new AtomicReference<>(IndexerState.STOPPED); - Function searchFunction = searchRequest -> searchResponse; + Function searchFunction = searchRequest -> { + searchResponse.mustIncRef(); + return searchResponse; + }; Function bulkFunction = bulkRequest -> new BulkResponse(new BulkItemResponse[0], 100); @@ -571,7 +567,6 @@ public void testRetentionPolicyDeleteByQueryThrowsIrrecoverable() throws Excepti bulkFunction, deleteByQueryFunction, threadPool, - ThreadPool.Names.GENERIC, auditor, context ); @@ -635,7 +630,10 @@ public void testRetentionPolicyDeleteByQueryThrowsTemporaryProblem() throws Exce ); try { AtomicReference state = new AtomicReference<>(IndexerState.STOPPED); - Function searchFunction = searchRequest -> searchResponse; + Function searchFunction = searchRequest -> { + searchResponse.mustIncRef(); + return searchResponse; + }; Function bulkFunction = bulkRequest -> new BulkResponse(new BulkItemResponse[0], 100); @@ -670,7 +668,6 @@ public void testRetentionPolicyDeleteByQueryThrowsTemporaryProblem() throws Exce bulkFunction, deleteByQueryFunction, threadPool, - ThreadPool.Names.GENERIC, auditor, context ); @@ -744,6 +741,7 @@ public SearchResponse apply(SearchRequest searchRequest) { new ShardSearchFailure[] { new ShardSearchFailure(new Exception()) } ); } + searchResponse.mustIncRef(); return searchResponse; } }; @@ -764,7 +762,6 @@ public SearchResponse apply(SearchRequest searchRequest) { bulkFunction, null, threadPool, - ThreadPool.Names.GENERIC, auditor, context ); @@ -865,17 +862,7 @@ public void testHandleFailureAuditing() { ) ); - MockedTransformIndexer indexer = createMockIndexer( - config, - state, - searchFunction, - bulkFunction, - null, - threadPool, - 
ThreadPool.Names.GENERIC, - auditor, - context - ); + MockedTransformIndexer indexer = createMockIndexer(config, state, searchFunction, bulkFunction, null, threadPool, auditor, context); indexer.handleFailure( new SearchPhaseExecutionException( @@ -936,6 +923,151 @@ public void testHandleFailureAuditing() { auditor.assertAllExpectationsMatched(); } + /** + * Given no bulk upload errors + * When we run the indexer + * Then we should not fail or recreate the destination index + */ + public void testHandleBulkResponseWithNoFailures() throws Exception { + var indexer = runIndexer(createMockIndexer(returnHit(), EMPTY_BULK_RESPONSE)); + assertThat(indexer.getStats().getIndexFailures(), is(0L)); + assertFalse(indexer.context.shouldRecreateDestinationIndex()); + assertNull(indexer.context.getLastFailure()); + } + + private static TransformIndexer runIndexer(MockedTransformIndexer indexer) throws Exception { + var latch = indexer.newLatch(1); + indexer.start(); + assertThat(indexer.getState(), equalTo(IndexerState.STARTED)); + assertTrue(indexer.maybeTriggerAsyncJob(System.currentTimeMillis())); + assertThat(indexer.getState(), equalTo(IndexerState.INDEXING)); + latch.countDown(); + assertBusy(() -> assertThat(indexer.getState(), equalTo(IndexerState.STARTED)), 10, TimeUnit.SECONDS); + return indexer; + } + + private MockedTransformIndexer createMockIndexer( + Function searchFunction, + Function bulkFunction + ) { + return createMockIndexer(searchFunction, bulkFunction, mock(TransformContext.Listener.class)); + } + + private static Function returnHit() { + return request -> new SearchResponse( + SearchHits.unpooled(new SearchHit[] { SearchHit.unpooled(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), + // Simulate completely null aggs + null, + new Suggest(Collections.emptyList()), + false, + false, + new SearchProfileResults(Collections.emptyMap()), + 1, + "", + 1, + 1, + 0, + 0, + ShardSearchFailure.EMPTY_ARRAY, + SearchResponse.Clusters.EMPTY + ); + } + + /** 
+ * Given an irrecoverable bulk upload error + * When we run the indexer + * Then we should fail without retries and not recreate the destination index + */ + public void testHandleBulkResponseWithIrrecoverableFailures() throws Exception { + var failCalled = new AtomicBoolean(); + var indexer = runIndexer( + createMockIndexer( + returnHit(), + bulkResponseWithError(new ResourceNotFoundException("resource not found error")), + createContextListener(failCalled, new AtomicReference<>()) + ) + ); + assertThat(indexer.getStats().getIndexFailures(), is(1L)); + assertFalse(indexer.context.shouldRecreateDestinationIndex()); + assertTrue(failCalled.get()); + } + + private MockedTransformIndexer createMockIndexer( + Function searchFunction, + Function bulkFunction, + TransformContext.Listener listener + ) { + return createMockIndexer( + new TransformConfig( + randomAlphaOfLength(10), + randomSourceConfig(), + randomDestConfig(), + null, + null, + null, + randomPivotConfig(), + null, + randomBoolean() ? null : randomAlphaOfLengthBetween(1, 1000), + new SettingsConfig.Builder().setMaxPageSearchSize(randomBoolean() ? 
null : randomIntBetween(500, 10_000)).build(), + null, + null, + null, + null + ), + new AtomicReference<>(IndexerState.STOPPED), + searchFunction, + bulkFunction, + null, + threadPool, + mock(TransformAuditor.class), + new TransformContext(TransformTaskState.STARTED, "", 0, listener), + 1 + ); + } + + private static Function bulkResponseWithError(Exception e) { + return bulkRequest -> new BulkResponse( + new BulkItemResponse[] { + BulkItemResponse.failure(1, DocWriteRequest.OpType.INDEX, new BulkItemResponse.Failure("the_index", "id", e)) }, + 100 + ); + } + + /** + * Given an IndexNotFound bulk upload error + * When we run the indexer + * Then we should fail with retries and recreate the destination index + */ + public void testHandleBulkResponseWithIndexNotFound() throws Exception { + var indexer = runIndexerWithBulkResponseError(new IndexNotFoundException("Some Error")); + assertThat(indexer.getStats().getIndexFailures(), is(1L)); + assertTrue(indexer.context.shouldRecreateDestinationIndex()); + assertFalse(bulkIndexingException(indexer).isIrrecoverable()); + } + + private TransformIndexer runIndexerWithBulkResponseError(Exception e) throws Exception { + return runIndexer(createMockIndexer(returnHit(), bulkResponseWithError(e))); + } + + private static BulkIndexingException bulkIndexingException(TransformIndexer indexer) { + var lastFailure = indexer.context.getLastFailure(); + assertNotNull(lastFailure); + assertThat(lastFailure, instanceOf(BulkIndexingException.class)); + return (BulkIndexingException) lastFailure; + } + + /** + * Given a recoverable bulk upload error + * When we run the indexer + * Then we should fail with retries and not recreate the destination index + */ + public void testHandleBulkResponseWithNoIrrecoverableFailures() throws Exception { + var indexer = runIndexerWithBulkResponseError(new EsRejectedExecutionException("es rejected execution")); + assertThat(indexer.getStats().getIndexFailures(), is(1L)); + 
assertFalse(indexer.context.shouldRecreateDestinationIndex()); + assertFalse(bulkIndexingException(indexer).isIrrecoverable()); + } + public void testHandleFailure() { testHandleFailure(0, 5, 0, 0); testHandleFailure(5, 0, 5, 2); @@ -996,17 +1128,7 @@ private void testHandleFailure( ) ); - MockedTransformIndexer indexer = createMockIndexer( - config, - state, - searchFunction, - bulkFunction, - null, - threadPool, - ThreadPool.Names.GENERIC, - auditor, - context - ); + MockedTransformIndexer indexer = createMockIndexer(config, state, searchFunction, bulkFunction, null, threadPool, auditor, context); for (int i = 0; i < expectedEffectiveNumFailureRetries; ++i) { indexer.handleFailure(new Exception("exception no. " + (i + 1))); @@ -1039,14 +1161,26 @@ private MockedTransformIndexer createMockIndexer( Function bulkFunction, Function deleteByQueryFunction, ThreadPool threadPool, - String executorName, TransformAuditor auditor, TransformContext context + ) { + return createMockIndexer(config, state, searchFunction, bulkFunction, deleteByQueryFunction, threadPool, auditor, context, 0); + } + + private MockedTransformIndexer createMockIndexer( + TransformConfig config, + AtomicReference state, + Function searchFunction, + Function bulkFunction, + Function deleteByQueryFunction, + ThreadPool threadPool, + TransformAuditor auditor, + TransformContext context, + int doProcessCount ) { IndexBasedTransformConfigManager transformConfigManager = mock(IndexBasedTransformConfigManager.class); doAnswer(invocationOnMock -> { - @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; + ActionListener listener = invocationOnMock.getArgument(1); listener.onResponse(config); return null; }).when(transformConfigManager).getTransformConfiguration(any(), any()); @@ -1055,7 +1189,6 @@ private MockedTransformIndexer createMockIndexer( mock(ClusterService.class), mock(IndexNameExpressionResolver.class), mock(TransformExtension.class), - 
executorName, transformConfigManager, mock(CheckpointProvider.class), config, @@ -1066,7 +1199,8 @@ private MockedTransformIndexer createMockIndexer( context, searchFunction, bulkFunction, - deleteByQueryFunction + deleteByQueryFunction, + doProcessCount ); indexer.initialize(); diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java index a474976cf9dfa..01a2db839b7d8 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java @@ -88,7 +88,7 @@ public class TransformIndexerStateTests extends ESTestCase { private static final SearchResponse ONE_HIT_SEARCH_RESPONSE = new SearchResponse( - new SearchHits(new SearchHit[] { new SearchHit(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), + SearchHits.unpooled(new SearchHit[] { SearchHit.unpooled(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), // Simulate completely null aggs null, new Suggest(Collections.emptyList()), diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinderTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinderTests.java index b71156cad5adf..9a0431d40a972 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinderTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinderTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.index.Index; 
+import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.mapper.DocumentParsingException; import org.elasticsearch.index.mapper.MapperException; import org.elasticsearch.index.shard.ShardId; @@ -27,116 +28,27 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentLocation; +import java.util.Arrays; import java.util.Collection; -import java.util.HashMap; import java.util.Map; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Function; +import java.util.stream.Collectors; public class ExceptionRootCauseFinderTests extends ESTestCase { public void testGetFirstIrrecoverableExceptionFromBulkResponses() { - Map bulkItemResponses = new HashMap<>(); - - int id = 1; - // 1 - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure( - "the_index", - "id", - new DocumentParsingException(XContentLocation.UNKNOWN, "document parsing error") - ) - ) - ); - // 2 - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure("the_index", "id", new ResourceNotFoundException("resource not found error")) - ) - ); - // 3 - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure("the_index", "id", new IllegalArgumentException("illegal argument error")) - ) - ); - // 4 not irrecoverable - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure("the_index", "id", new EsRejectedExecutionException("es rejected execution")) - ) - ); - // 5 not irrecoverable - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure("the_index", "id", new TranslogException(new ShardId("the_index", "uid", 0), "translog error")) - ) - ); - // 6 - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new 
BulkItemResponse.Failure( - "the_index", - "id", - new ElasticsearchSecurityException("Authentication required", RestStatus.UNAUTHORIZED) - ) - ) - ); - // 7 - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure( - "the_index", - "id", - new ElasticsearchSecurityException("current license is non-compliant for [transform]", RestStatus.FORBIDDEN) - ) - ) - ); - // 8 not irrecoverable - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure( - "the_index", - "id", - new ElasticsearchSecurityException("overloaded, to many requests", RestStatus.TOO_MANY_REQUESTS) - ) - ) - ); - // 9 not irrecoverable - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure( - "the_index", - "id", - new ElasticsearchSecurityException("internal error", RestStatus.INTERNAL_SERVER_ERROR) - ) - ) + Map bulkItemResponses = bulkItemResponses( + new DocumentParsingException(XContentLocation.UNKNOWN, "document parsing error"), + new ResourceNotFoundException("resource not found error"), + new IllegalArgumentException("illegal argument error"), + new EsRejectedExecutionException("es rejected execution"), + new TranslogException(new ShardId("the_index", "uid", 0), "translog error"), + new ElasticsearchSecurityException("Authentication required", RestStatus.UNAUTHORIZED), + new ElasticsearchSecurityException("current license is non-compliant for [transform]", RestStatus.FORBIDDEN), + new ElasticsearchSecurityException("overloaded, to many requests", RestStatus.TOO_MANY_REQUESTS), + new ElasticsearchSecurityException("internal error", RestStatus.INTERNAL_SERVER_ERROR), + new IndexNotFoundException("some missing index") ); assertFirstException(bulkItemResponses.values(), DocumentParsingException.class, "document parsing error"); @@ -157,6 +69,14 @@ public void testGetFirstIrrecoverableExceptionFromBulkResponses() { 
assertNull(ExceptionRootCauseFinder.getFirstIrrecoverableExceptionFromBulkResponses(bulkItemResponses.values())); } + private static Map bulkItemResponses(Exception... exceptions) { + var id = new AtomicInteger(1); + return Arrays.stream(exceptions) + .map(exception -> new BulkItemResponse.Failure("the_index", "id", exception)) + .map(failure -> BulkItemResponse.failure(id.get(), OpType.INDEX, failure)) + .collect(Collectors.toMap(response -> id.getAndIncrement(), Function.identity())); + } + public void testIsIrrecoverable() { assertFalse(ExceptionRootCauseFinder.isExceptionIrrecoverable(new MapperException("mappings problem"))); assertFalse(ExceptionRootCauseFinder.isExceptionIrrecoverable(new TaskCancelledException("cancelled task"))); @@ -174,6 +94,7 @@ public void testIsIrrecoverable() { assertTrue( ExceptionRootCauseFinder.isExceptionIrrecoverable(new DocumentParsingException(new XContentLocation(1, 2), "parse error")) ); + assertTrue(ExceptionRootCauseFinder.isExceptionIrrecoverable(new IndexNotFoundException("some missing index"))); } private static void assertFirstException(Collection bulkItemResponses, Class expectedClass, String message) { diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java index 84c8b0bd95b4f..8a775c7f7d3d8 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java @@ -26,7 +26,6 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests; import 
org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.test.SecuritySettingsSourceField; @@ -44,6 +43,11 @@ import java.util.function.Consumer; import static org.elasticsearch.transport.RemoteClusterPortSettings.TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomApplicationPrivileges; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomIndicesPrivileges; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomRemoteClusterPermissions; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomRemoteIndicesPrivileges; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomRoleDescriptorMetadata; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -420,16 +424,15 @@ private static RoleDescriptor randomRoleDescriptor(boolean includeRemoteDescript return new RoleDescriptor( randomAlphaOfLengthBetween(3, 90), randomSubsetOf(Set.of("all", "monitor", "none")).toArray(String[]::new), - RoleDescriptorTests.randomIndicesPrivileges(0, 3, excludedPrivileges), - RoleDescriptorTests.randomApplicationPrivileges(), + randomIndicesPrivileges(0, 3, excludedPrivileges), + randomApplicationPrivileges(), null, generateRandomStringArray(5, randomIntBetween(2, 8), false, true), - RoleDescriptorTests.randomRoleDescriptorMetadata(false), + randomRoleDescriptorMetadata(false), Map.of(), - includeRemoteDescriptors ? RoleDescriptorTests.randomRemoteIndicesPrivileges(1, 3, excludedPrivileges) : null, - includeRemoteDescriptors - ? RoleDescriptorTests.randomRemoteClusterPermissions(randomIntBetween(1, 3)) - : RemoteClusterPermissions.NONE, + includeRemoteDescriptors ? 
randomRemoteIndicesPrivileges(1, 3, excludedPrivileges) : null, + includeRemoteDescriptors ? randomRemoteClusterPermissions(randomIntBetween(1, 3)) : RemoteClusterPermissions.NONE, + null, null ); } diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RolesBackwardsCompatibilityIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RolesBackwardsCompatibilityIT.java new file mode 100644 index 0000000000000..4f4ff1d5743ee --- /dev/null +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RolesBackwardsCompatibilityIT.java @@ -0,0 +1,268 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.upgrades; + +import org.apache.http.HttpHost; +import org.elasticsearch.Build; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.test.XContentTestUtils; +import org.elasticsearch.test.rest.ObjectPath; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomApplicationPrivileges; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomIndicesPrivileges; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomRoleDescriptorMetadata; +import static org.hamcrest.Matchers.allOf; +import 
static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; + +public class RolesBackwardsCompatibilityIT extends AbstractUpgradeTestCase { + + private RestClient oldVersionClient = null; + private RestClient newVersionClient = null; + + public void testCreatingAndUpdatingRoles() throws Exception { + assumeTrue( + "The role description is supported after transport version: " + TransportVersions.SECURITY_ROLE_DESCRIPTION, + minimumTransportVersion().before(TransportVersions.SECURITY_ROLE_DESCRIPTION) + ); + switch (CLUSTER_TYPE) { + case OLD -> { + // Creating role in "old" cluster should succeed when description is not provided + final String initialRole = randomRoleDescriptorSerialized(false); + createRole(client(), "my-old-role", initialRole); + updateRole("my-old-role", randomValueOtherThan(initialRole, () -> randomRoleDescriptorSerialized(false))); + + // and fail if we include description + var createException = expectThrows( + Exception.class, + () -> createRole(client(), "my-invalid-old-role", randomRoleDescriptorSerialized(true)) + ); + assertThat( + createException.getMessage(), + containsString("failed to parse role [my-invalid-old-role]. unexpected field [description]") + ); + + RestClient client = client(); + var updateException = expectThrows( + Exception.class, + () -> updateRole(client, "my-old-role", randomRoleDescriptorSerialized(true)) + ); + assertThat( + updateException.getMessage(), + containsString("failed to parse role [my-old-role]. 
unexpected field [description]") + ); + } + case MIXED -> { + try { + this.createClientsByVersion(); + // succeed when role description is not provided + final String initialRole = randomRoleDescriptorSerialized(false); + createRole(client(), "my-valid-mixed-role", initialRole); + updateRole("my-valid-mixed-role", randomValueOtherThan(initialRole, () -> randomRoleDescriptorSerialized(false))); + + // against old node, fail when description is provided either in update or create request + { + Exception e = expectThrows( + Exception.class, + () -> updateRole(oldVersionClient, "my-valid-mixed-role", randomRoleDescriptorSerialized(true)) + ); + assertThat( + e.getMessage(), + allOf(containsString("failed to parse role"), containsString("unexpected field [description]")) + ); + } + { + Exception e = expectThrows( + Exception.class, + () -> createRole(oldVersionClient, "my-invalid-mixed-role", randomRoleDescriptorSerialized(true)) + ); + assertThat( + e.getMessage(), + containsString("failed to parse role [my-invalid-mixed-role]. 
unexpected field [description]") + ); + } + + // and against new node in a mixed cluster we should fail + { + Exception e = expectThrows( + Exception.class, + () -> createRole(newVersionClient, "my-invalid-mixed-role", randomRoleDescriptorSerialized(true)) + ); + assertThat( + e.getMessage(), + containsString( + "all nodes must have version [" + + TransportVersions.SECURITY_ROLE_DESCRIPTION.toReleaseVersion() + + "] or higher to support specifying role description" + ) + ); + } + { + Exception e = expectThrows( + Exception.class, + () -> updateRole(newVersionClient, "my-valid-mixed-role", randomRoleDescriptorSerialized(true)) + ); + assertThat( + e.getMessage(), + containsString( + "all nodes must have version [" + + TransportVersions.SECURITY_ROLE_DESCRIPTION.toReleaseVersion() + + "] or higher to support specifying role description" + ) + ); + } + } finally { + this.closeClientsByVersion(); + } + } + case UPGRADED -> { + // on upgraded cluster which supports new description field + // create/update requests should succeed either way (with or without description) + final String initialRole = randomRoleDescriptorSerialized(randomBoolean()); + createRole(client(), "my-valid-upgraded-role", initialRole); + updateRole( + "my-valid-upgraded-role", + randomValueOtherThan(initialRole, () -> randomRoleDescriptorSerialized(randomBoolean())) + ); + } + } + } + + private void createRole(RestClient client, String roleName, String role) throws IOException { + final Request createRoleRequest = new Request("POST", "_security/role/" + roleName); + createRoleRequest.setJsonEntity(role); + var createRoleResponse = client.performRequest(createRoleRequest); + assertOK(createRoleResponse); + } + + private void updateRole(String roleName, String payload) throws IOException { + updateRole(client(), roleName, payload); + } + + private void updateRole(RestClient client, String roleName, String payload) throws IOException { + final Request updateRequest = new Request("PUT", 
"_security/role/" + roleName); + updateRequest.setJsonEntity(payload); + boolean created = assertOKAndCreateObjectPath(client.performRequest(updateRequest)).evaluate("role.created"); + assertThat(created, equalTo(false)); + } + + private static String randomRoleDescriptorSerialized(boolean includeDescription) { + try { + return XContentTestUtils.convertToXContent( + XContentTestUtils.convertToMap(randomRoleDescriptor(includeDescription)), + XContentType.JSON + ).utf8ToString(); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + + private boolean nodeSupportRoleDescription(Map nodeDetails) { + String nodeVersionString = (String) nodeDetails.get("version"); + TransportVersion transportVersion = getTransportVersionWithFallback( + nodeVersionString, + nodeDetails.get("transport_version"), + () -> TransportVersions.ZERO + ); + + if (transportVersion.equals(TransportVersions.ZERO)) { + // In cases where we were not able to find a TransportVersion, a pre-8.8.0 node answered about a newer (upgraded) node. + // In that case, the node will be current (upgraded), and remote indices are supported for sure. 
+ var nodeIsCurrent = nodeVersionString.equals(Build.current().version()); + assertTrue(nodeIsCurrent); + return true; + } + return transportVersion.onOrAfter(TransportVersions.SECURITY_ROLE_DESCRIPTION); + } + + private void createClientsByVersion() throws IOException { + var clientsByCapability = getRestClientByCapability(); + if (clientsByCapability.size() == 2) { + for (Map.Entry client : clientsByCapability.entrySet()) { + if (client.getKey() == false) { + oldVersionClient = client.getValue(); + } else { + newVersionClient = client.getValue(); + } + } + assertThat(oldVersionClient, notNullValue()); + assertThat(newVersionClient, notNullValue()); + } else { + fail("expected 2 versions during rolling upgrade but got: " + clientsByCapability.size()); + } + } + + private void closeClientsByVersion() throws IOException { + if (oldVersionClient != null) { + oldVersionClient.close(); + oldVersionClient = null; + } + if (newVersionClient != null) { + newVersionClient.close(); + newVersionClient = null; + } + } + + @SuppressWarnings("unchecked") + private Map getRestClientByCapability() throws IOException { + Response response = client().performRequest(new Request("GET", "_nodes")); + assertOK(response); + ObjectPath objectPath = ObjectPath.createFromResponse(response); + Map nodesAsMap = objectPath.evaluate("nodes"); + Map> hostsByCapability = new HashMap<>(); + for (Map.Entry entry : nodesAsMap.entrySet()) { + Map nodeDetails = (Map) entry.getValue(); + var capabilitySupported = nodeSupportRoleDescription(nodeDetails); + Map httpInfo = (Map) nodeDetails.get("http"); + hostsByCapability.computeIfAbsent(capabilitySupported, k -> new ArrayList<>()) + .add(HttpHost.create((String) httpInfo.get("publish_address"))); + } + Map clientsByCapability = new HashMap<>(); + for (var entry : hostsByCapability.entrySet()) { + clientsByCapability.put(entry.getKey(), buildClient(restClientSettings(), entry.getValue().toArray(new HttpHost[0]))); + } + return clientsByCapability; + } + 
+ private static RoleDescriptor randomRoleDescriptor(boolean includeDescription) { + final Set excludedPrivileges = Set.of( + "cross_cluster_replication", + "cross_cluster_replication_internal", + "manage_data_stream_lifecycle" + ); + return new RoleDescriptor( + randomAlphaOfLengthBetween(3, 90), + randomSubsetOf(Set.of("all", "monitor", "none")).toArray(String[]::new), + randomIndicesPrivileges(0, 3, excludedPrivileges), + randomApplicationPrivileges(), + null, + generateRandomStringArray(5, randomIntBetween(2, 8), false, true), + randomRoleDescriptorMetadata(false), + Map.of(), + null, + null, + null, + includeDescription ? randomAlphaOfLength(20) : null + ); + } +} diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java index 17363d58545c2..3d9e7f3828bc7 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java @@ -20,14 +20,11 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.ssl.SslVerificationMode; import org.elasticsearch.common.util.Maps; -import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.test.fixtures.smb.SmbTestContainer; import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; -import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; import 
org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequestBuilder; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; @@ -190,16 +187,11 @@ public void setupRoleMappings() throws Exception { Map> futures = Maps.newLinkedHashMapWithExpectedSize(content.size()); for (int i = 0; i < content.size(); i++) { final String name = "external_" + i; - final PutRoleMappingRequestBuilder builder; - try ( - XContentParser parser = XContentHelper.createParserNotCompressed( - LoggingDeprecationHandler.XCONTENT_PARSER_CONFIG, - new BytesArray(content.get(i)), - XContentType.JSON - ) - ) { - builder = new PutRoleMappingRequestBuilder(client()).source(name, parser); - } + final PutRoleMappingRequestBuilder builder = new PutRoleMappingRequestBuilder(client()).source( + name, + new BytesArray(content.get(i)), + XContentType.JSON + ); futures.put(name, builder.execute()); } for (String mappingName : futures.keySet()) {