From 1145e04b8520ab4dac6569b213ae0710eec0d5ca Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Przemys=C5=82aw=20Witek?= Date: Wed, 10 Jan 2024 13:06:45 +0100 Subject: [PATCH 01/75] Make `ParentTaskAssigningClient.getRemoteClusterClient` method also return `ParentTaskAssigningClient` (#100813) Currently, if we use `ParentTaskAssigningClient` and want to spawn a remote request, we need to: 1. get remote client using `.getRemoteClusterClient` method 2. call `.setParentTask` on the remote request method manually This PR makes it so the remote client obtained by calling `.getRemoteClusterClient` method is also a `ParentTaskAssigningClient` so there is no need to call `.setParentTask` on the child request anymore. --- docs/changelog/100813.yaml | 6 +++++ .../internal/ParentTaskAssigningClient.java | 8 ++++++ .../ParentTaskAssigningClientTests.java | 26 +++++++++++++++++++ 3 files changed, 40 insertions(+) create mode 100644 docs/changelog/100813.yaml diff --git a/docs/changelog/100813.yaml b/docs/changelog/100813.yaml new file mode 100644 index 0000000000000..476098b62c106 --- /dev/null +++ b/docs/changelog/100813.yaml @@ -0,0 +1,6 @@ +pr: 100813 +summary: Make `ParentTaskAssigningClient.getRemoteClusterClient` method also return + `ParentTaskAssigningClient` +area: Infra/Transport API +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/client/internal/ParentTaskAssigningClient.java b/server/src/main/java/org/elasticsearch/client/internal/ParentTaskAssigningClient.java index 967e5c72efdd0..e6393393916b1 100644 --- a/server/src/main/java/org/elasticsearch/client/internal/ParentTaskAssigningClient.java +++ b/server/src/main/java/org/elasticsearch/client/internal/ParentTaskAssigningClient.java @@ -16,6 +16,8 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; +import java.util.concurrent.Executor; + /** * A {@linkplain Client} that sets the parent task on all requests that it makes. Use this to conveniently implement actions that cause * many other actions. 
@@ -58,4 +60,10 @@ protected void request.setParentTask(parentTask); super.doExecute(action, request, listener); } + + @Override + public ParentTaskAssigningClient getRemoteClusterClient(String clusterAlias, Executor responseExecutor) { + Client remoteClient = super.getRemoteClusterClient(clusterAlias, responseExecutor); + return new ParentTaskAssigningClient(remoteClient, parentTask); + } } diff --git a/server/src/test/java/org/elasticsearch/client/internal/ParentTaskAssigningClientTests.java b/server/src/test/java/org/elasticsearch/client/internal/ParentTaskAssigningClientTests.java index 2c2e131b8c5ad..0100c7cab5ba4 100644 --- a/server/src/test/java/org/elasticsearch/client/internal/ParentTaskAssigningClientTests.java +++ b/server/src/test/java/org/elasticsearch/client/internal/ParentTaskAssigningClientTests.java @@ -15,10 +15,17 @@ import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.client.NoOpClient; +import java.util.concurrent.Executor; + +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; + public class ParentTaskAssigningClientTests extends ESTestCase { public void testSetsParentId() { TaskId[] parentTaskId = new TaskId[] { new TaskId(randomAlphaOfLength(3), randomLong()) }; @@ -51,4 +58,23 @@ protected void client.unwrap().clearScroll(new ClearScrollRequest()); } } + + public void testRemoteClientIsAlsoAParentAssigningClient() { + TaskId parentTaskId = new TaskId(randomAlphaOfLength(3), randomLong()); + + try (var threadPool = createThreadPool()) { + final var mockClient = new NoOpClient(threadPool) { + @Override + public Client getRemoteClusterClient(String clusterAlias, Executor responseExecutor) { + return mock(Client.class); + } + }; + + final var client = new ParentTaskAssigningClient(mockClient, parentTaskId); + assertThat( + client.getRemoteClusterClient("remote-cluster", EsExecutors.DIRECT_EXECUTOR_SERVICE), + is(instanceOf(ParentTaskAssigningClient.class)) + ); + } + } } From ba0d1a4823cdc201fa57f7d1bfd2a69eb5c00cc0 Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Wed, 10 Jan 2024 12:15:18 +0000 Subject: [PATCH 02/75] Add ES|QL async security tests (#104137) This commit expands the current ES|QL security tests to cover async. 
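In outline, each async test mirrors its synchronous counterpart but drives the async endpoints: submit the query as one user, fetch and delete the stored result by id, and verify that a different user gets a 404 for both the get and the delete. A condensed sketch of what one of these test bodies does, using the low-level REST client helpers from `ESRestTestCase` (the index and user names are placeholders, not the exact fixtures; this is illustrative, not the literal test code below):

    // submit as user1 and keep the result so it can be fetched later
    Request submit = new Request("POST", "_query/async");
    submit.setJsonEntity("{\"query\": \"from index-user1 | stats sum(value)\", \"keep_on_completion\": true}");
    submit.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("es-security-runas-user", "user1"));
    String id = (String) entityAsMap(client().performRequest(submit).getEntity()).get("id");

    // a different user must not be able to read (or delete) the stored result
    Request getAsUser2 = new Request("GET", "_query/async/" + id);
    getAsUser2.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("es-security-runas-user", "user2"));
    ResponseException e = expectThrows(ResponseException.class, () -> client().performRequest(getAsUser2));
    assertEquals(404, e.getResponse().getStatusLine().getStatusCode());
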
--- .../test/rest/ESRestTestCase.java | 9 +- .../xpack/core/esql/EsqlAsyncActionNames.java | 15 ++ .../xpack/esql/EsqlAsyncSecurityIT.java | 134 ++++++++++++++++++ .../xpack/esql/EsqlSecurityIT.java | 64 +++++---- .../esql/action/EsqlAsyncGetResultAction.java | 3 +- .../xpack/security/authz/RBACEngine.java | 2 + 6 files changed, 195 insertions(+), 32 deletions(-) create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/EsqlAsyncActionNames.java create mode 100644 x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlAsyncSecurityIT.java diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 94b1d4ab321ee..20cd1997fd70e 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -11,6 +11,7 @@ import io.netty.handler.codec.http.HttpMethod; import org.apache.http.Header; +import org.apache.http.HttpEntity; import org.apache.http.HttpHost; import org.apache.http.HttpStatus; import org.apache.http.client.methods.HttpDelete; @@ -157,14 +158,18 @@ public abstract class ESRestTestCase extends ESTestCase { * Convert the entity from a {@link Response} into a map of maps. */ public static Map entityAsMap(Response response) throws IOException { - XContentType xContentType = XContentType.fromMediaType(response.getEntity().getContentType().getValue()); + return entityAsMap(response.getEntity()); + } + + public static Map entityAsMap(HttpEntity entity) throws IOException { + XContentType xContentType = XContentType.fromMediaType(entity.getContentType().getValue()); // EMPTY and THROW are fine here because `.map` doesn't use named x content or deprecation try ( XContentParser parser = xContentType.xContent() .createParser( XContentParserConfiguration.EMPTY.withRegistry(NamedXContentRegistry.EMPTY) .withDeprecationHandler(DeprecationHandler.THROW_UNSUPPORTED_OPERATION), - response.getEntity().getContent() + entity.getContent() ) ) { return parser.map(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/EsqlAsyncActionNames.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/EsqlAsyncActionNames.java new file mode 100644 index 0000000000000..81ab54fc2db5f --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/EsqlAsyncActionNames.java @@ -0,0 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.esql; + +/** + * Exposes ES|QL async action names for RBACEngine. + */ +public class EsqlAsyncActionNames { + public static final String ESQL_ASYNC_GET_RESULT_ACTION_NAME = "indices:data/read/esql/async/get"; +} diff --git a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlAsyncSecurityIT.java b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlAsyncSecurityIT.java new file mode 100644 index 0000000000000..544eb82fb5ace --- /dev/null +++ b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlAsyncSecurityIT.java @@ -0,0 +1,134 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.json.JsonXContent; + +import java.io.IOException; +import java.util.Locale; + +import static org.elasticsearch.core.TimeValue.timeValueNanos; +import static org.hamcrest.Matchers.either; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; + +public class EsqlAsyncSecurityIT extends EsqlSecurityIT { + + private static final Logger LOGGER = LogManager.getLogger(EsqlAsyncSecurityIT.class); + + @Override + protected Response runESQLCommand(String user, String command) throws IOException { + var response = runAsync(user, command); + assertOK(response); + var respMap = entityAsMap(response.getEntity()); + String id = (String) respMap.get("id"); + assertThat((boolean) respMap.get("is_running"), either(is(true)).or(is(false))); + var getResponse = runAsyncGet(user, id); + assertOK(getResponse); + var deleteResponse = runAsyncDelete(user, id); + assertOK(deleteResponse); + return getResponse; + } + + @Override + public void testUnauthorizedIndices() throws IOException { + super.testUnauthorizedIndices(); + { + var response = runAsync("user1", "from index-user1 | stats sum(value)"); + assertOK(response); + var respMap = entityAsMap(response.getEntity()); + String id = (String) respMap.get("id"); + assertThat((boolean) respMap.get("is_running"), either(is(true)).or(is(false))); + + var getResponse = runAsyncGet("user1", id); // sanity + assertOK(getResponse); + ResponseException error; + error = expectThrows(ResponseException.class, () -> runAsyncGet("user2", id)); + // resource not found exception if the authenticated user is not the creator of the original task + assertThat(error.getResponse().getStatusLine().getStatusCode(), equalTo(404)); + + error = expectThrows(ResponseException.class, () -> runAsyncDelete("user2", id)); + // resource not found exception if the authenticated user is not the creator of the original task + assertThat(error.getResponse().getStatusLine().getStatusCode(), equalTo(404)); + } + { + var response = runAsync("user2", "from index-user2 | stats sum(value)"); + assertOK(response); + var respMap = entityAsMap(response.getEntity()); + String id = (String) respMap.get("id"); + assertThat((boolean) respMap.get("is_running"), either(is(true)).or(is(false))); + + var getResponse = runAsyncGet("user2", id); // sanity + assertOK(getResponse); + ResponseException error; + error = expectThrows(ResponseException.class, () -> runAsyncGet("user1", id)); + assertThat(error.getResponse().getStatusLine().getStatusCode(), equalTo(404)); + + error = expectThrows(ResponseException.class, () -> runAsyncDelete("user1", id)); + assertThat(error.getResponse().getStatusLine().getStatusCode(), equalTo(404)); + } + } + + // Keep_on_complete is always true, so we will always get an id + private Response runAsync(String user, String command) throws IOException { + if 
(command.toLowerCase(Locale.ROOT).contains("limit") == false) { + // add a (high) limit to avoid warnings on default limit + command += " | limit 10000000"; + } + XContentBuilder json = JsonXContent.contentBuilder(); + json.startObject(); + json.field("query", command); + addRandomPragmas(json); + json.field("wait_for_completion_timeout", timeValueNanos(randomIntBetween(1, 1000))); + json.field("keep_on_completion", "true"); + json.endObject(); + Request request = new Request("POST", "_query/async"); + request.setJsonEntity(Strings.toString(json)); + request.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("es-security-runas-user", user)); + logRequest(request); + Response response = client().performRequest(request); + logResponse(response); + return response; + } + + private Response runAsyncGet(String user, String id) throws IOException { + Request getRequest = new Request("GET", "_query/async/" + id + "?wait_for_completion_timeout=60s"); + getRequest.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("es-security-runas-user", user)); + logRequest(getRequest); + var response = client().performRequest(getRequest); + logResponse(response); + return response; + } + + private Response runAsyncDelete(String user, String id) throws IOException { + Request getRequest = new Request("DELETE", "_query/async/" + id); + getRequest.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("es-security-runas-user", user)); + logRequest(getRequest); + var response = client().performRequest(getRequest); + logResponse(response); + return response; + } + + static void logRequest(Request request) throws IOException { + LOGGER.info("REQUEST={}", request); + var entity = request.getEntity(); + if (entity != null) LOGGER.info("REQUEST body={}", entityAsMap(entity)); + } + + static void logResponse(Response response) { + LOGGER.info("RESPONSE={}", response); + } +} diff --git a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java index 98ec411569af5..e363fa64c594d 100644 --- a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java +++ b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java @@ -114,7 +114,7 @@ public void testAllowedIndices() throws Exception { } } - public void testUnauthorizedIndices() { + public void testUnauthorizedIndices() throws IOException { ResponseException error; error = expectThrows(ResponseException.class, () -> runESQLCommand("user1", "from index-user2 | stats sum(value)")); assertThat(error.getResponse().getStatusLine().getStatusCode(), equalTo(400)); @@ -271,41 +271,47 @@ private void removeEnrichPolicy() throws Exception { client().performRequest(new Request("DELETE", "_enrich/policy/songs")); } - private Response runESQLCommand(String user, String command) throws IOException { + protected Response runESQLCommand(String user, String command) throws IOException { if (command.toLowerCase(Locale.ROOT).contains("limit") == false) { // add a (high) limit to avoid warnings on default limit command += " | limit 10000000"; } - Settings pragmas = Settings.EMPTY; - if (Build.current().isSnapshot()) { - Settings.Builder settings = Settings.builder(); - if (randomBoolean()) { - settings.put("page_size", between(1, 5)); - } - if (randomBoolean()) { - settings.put("exchange_buffer_size", between(1, 2)); - } - if (randomBoolean()) { - 
settings.put("data_partitioning", randomFrom("shard", "segment", "doc")); - } - if (randomBoolean()) { - settings.put("enrich_max_workers", between(1, 5)); - } - pragmas = settings.build(); - } - XContentBuilder query = JsonXContent.contentBuilder(); - query.startObject(); - query.field("query", command); - if (pragmas != Settings.EMPTY) { - query.startObject("pragma"); - query.value(pragmas); - query.endObject(); - } - query.endObject(); + XContentBuilder json = JsonXContent.contentBuilder(); + json.startObject(); + json.field("query", command); + addRandomPragmas(json); + json.endObject(); Request request = new Request("POST", "_query"); - request.setJsonEntity("{\"query\":\"" + command + "\"}"); + request.setJsonEntity(Strings.toString(json)); request.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("es-security-runas-user", user)); return client().performRequest(request); } + static void addRandomPragmas(XContentBuilder builder) throws IOException { + if (Build.current().isSnapshot()) { + Settings pragmas = randomPragmas(); + if (pragmas != Settings.EMPTY) { + builder.startObject("pragma"); + builder.value(pragmas); + builder.endObject(); + } + } + } + + static Settings randomPragmas() { + Settings.Builder settings = Settings.builder(); + if (randomBoolean()) { + settings.put("page_size", between(1, 5)); + } + if (randomBoolean()) { + settings.put("exchange_buffer_size", between(1, 2)); + } + if (randomBoolean()) { + settings.put("data_partitioning", randomFrom("shard", "segment", "doc")); + } + if (randomBoolean()) { + settings.put("enrich_max_workers", between(1, 5)); + } + return settings.build(); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlAsyncGetResultAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlAsyncGetResultAction.java index 1603dd8fd3746..f6593dccb9c49 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlAsyncGetResultAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlAsyncGetResultAction.java @@ -8,12 +8,13 @@ package org.elasticsearch.xpack.esql.action; import org.elasticsearch.action.ActionType; +import org.elasticsearch.xpack.core.esql.EsqlAsyncActionNames; public class EsqlAsyncGetResultAction extends ActionType { public static final EsqlAsyncGetResultAction INSTANCE = new EsqlAsyncGetResultAction(); - public static final String NAME = "indices:data/read/esql/async/get"; + public static final String NAME = EsqlAsyncActionNames.ESQL_ASYNC_GET_RESULT_ACTION_NAME; private EsqlAsyncGetResultAction() { super(NAME, in -> { throw new IllegalArgumentException("can't transport EsqlAsyncGetResultAction"); }); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java index f92252ebe851c..1b1d7c789b21d 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java @@ -43,6 +43,7 @@ import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.core.async.TransportDeleteAsyncResultAction; import org.elasticsearch.xpack.core.eql.EqlAsyncActionNames; +import org.elasticsearch.xpack.core.esql.EsqlAsyncActionNames; import org.elasticsearch.xpack.core.search.action.GetAsyncSearchAction; import 
org.elasticsearch.xpack.core.search.action.SubmitAsyncSearchAction; import org.elasticsearch.xpack.core.security.action.apikey.GetApiKeyAction; @@ -963,6 +964,7 @@ private static boolean isAsyncRelatedAction(String action) { || action.equals(GetAsyncSearchAction.NAME) || action.equals(TransportDeleteAsyncResultAction.TYPE.name()) || action.equals(EqlAsyncActionNames.EQL_ASYNC_GET_RESULT_ACTION_NAME) + || action.equals(EsqlAsyncActionNames.ESQL_ASYNC_GET_RESULT_ACTION_NAME) || action.equals(SqlAsyncActionNames.SQL_ASYNC_GET_RESULT_ACTION_NAME); } From 73f537170b70e0191f607902e0aa3ad124950abe Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Wed, 10 Jan 2024 07:46:42 -0500 Subject: [PATCH 03/75] Update nested knn search documentation about inner-hits (#104154) Adding a link tag for inner hits behavior and kNN search. Additionally adding a note that if you are using multiple knn clauses, that the inner hit name should be provided. --- docs/reference/search/search-your-data/knn-search.asciidoc | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/docs/reference/search/search-your-data/knn-search.asciidoc b/docs/reference/search/search-your-data/knn-search.asciidoc index 496e0cf1b9d4f..a847d9a306b7c 100644 --- a/docs/reference/search/search-your-data/knn-search.asciidoc +++ b/docs/reference/search/search-your-data/knn-search.asciidoc @@ -814,12 +814,19 @@ Now we have filtered based on the top level `"creation_time"` and only one docum ---- // TESTRESPONSE[s/"took": 4/"took" : "$body.took"/] +[discrete] +[[nested-knn-search-inner-hits]] +==== Nested kNN Search with Inner hits + Additionally, if you wanted to extract the nearest passage for a matched document, you can supply <> to the `knn` clause. NOTE: `inner_hits` for kNN will only ever return a single hit, the nearest passage vector. Setting `"size"` to any value greater than `1` will have no effect on the results. +NOTE: When using `inner_hits` and multiple `knn` clauses, be sure to specify the <> +field. Otherwise, a naming clash can occur and fail the search request. + [source,console] ---- POST passage_vectors/_search From 1d7d9dc1eed2aeda08dd7a62a8a1893c5113a95b Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Wed, 10 Jan 2024 14:53:35 +0100 Subject: [PATCH 04/75] Refactor RollupResponseTranslator slightly to ease ref-counting (#104196) Work with the combined MSearchResponse here throughout to make ref-counting the full response-array in one possible. 
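Concretely, `translateResponse` and `combineResponses` now accept the `MultiSearchResponse` itself rather than its unpacked `Item[]`, so a caller can tie the lifetime of every per-item `SearchResponse` to the single top-level response and release it in one place. A rough sketch of the resulting calling pattern (an illustrative helper, not actual production code; names are placeholders):

    // caller owns `msearch` and releases it once; the translated SearchResponse
    // returned by the translator must be released separately by its consumer
    static void consumeRollupResult(MultiSearchResponse msearch,
                                    AggregationReduceContext.Builder reduceContextBuilder) throws Exception {
        try {
            SearchResponse combined = RollupResponseTranslator.combineResponses(msearch, reduceContextBuilder);
            try {
                // consume the merged live + rollup response
            } finally {
                combined.decRef();
            }
        } finally {
            msearch.decRef(); // a single release now covers the whole response array
        }
    }
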
--- .../rollup/RollupResponseTranslator.java | 11 +- .../action/TransportRollupSearchAction.java | 8 +- .../RollupResponseTranslationTests.java | 345 ++++++++++-------- .../job/RollupIndexerIndexingTests.java | 3 +- 4 files changed, 211 insertions(+), 156 deletions(-) diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java index ed3a3f294c65c..f7394ec12a779 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java @@ -73,10 +73,10 @@ public static SearchResponse verifyResponse(MultiSearchResponse.Item normalRespo * on the translation conventions */ public static SearchResponse translateResponse( - MultiSearchResponse.Item[] rolledMsearch, + MultiSearchResponse mSearchResponse, AggregationReduceContext.Builder reduceContextBuilder ) throws Exception { - + var rolledMsearch = mSearchResponse.getResponses(); assert rolledMsearch.length > 0; List responses = new ArrayList<>(); for (MultiSearchResponse.Item item : rolledMsearch) { @@ -199,13 +199,13 @@ public static SearchResponse translateResponse( * so that the final product looks like a regular aggregation response, allowing it to be * reduced/merged into the response from the un-rolled index * - * @param msearchResponses The responses from the msearch, where the first response is the live-index response + * @param mSearchResponse The response from the msearch, where the first response is the live-index response */ public static SearchResponse combineResponses( - MultiSearchResponse.Item[] msearchResponses, + MultiSearchResponse mSearchResponse, AggregationReduceContext.Builder reduceContextBuilder ) throws Exception { - + var msearchResponses = mSearchResponse.getResponses(); assert msearchResponses.length >= 2; boolean first = true; @@ -242,6 +242,7 @@ public static SearchResponse combineResponses( // If we only have a live index left, just return it directly. 
We know it can't be an error already if (rolledResponses.isEmpty() && liveResponse != null) { + liveResponse.mustIncRef(); return liveResponse; } else if (rolledResponses.isEmpty()) { throw new ResourceNotFoundException("No indices (live or rollup) found during rollup search"); diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java index ff167c5586dce..2df415fbe02dc 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java @@ -154,14 +154,16 @@ static SearchResponse processResponses( ) throws Exception { if (rollupContext.hasLiveIndices() && rollupContext.hasRollupIndices()) { // Both - return RollupResponseTranslator.combineResponses(msearchResponse.getResponses(), reduceContextBuilder); + return RollupResponseTranslator.combineResponses(msearchResponse, reduceContextBuilder); } else if (rollupContext.hasLiveIndices()) { // Only live assert msearchResponse.getResponses().length == 1; - return RollupResponseTranslator.verifyResponse(msearchResponse.getResponses()[0]); + var res = RollupResponseTranslator.verifyResponse(msearchResponse.getResponses()[0]); + res.mustIncRef(); + return res; } else if (rollupContext.hasRollupIndices()) { // Only rollup - return RollupResponseTranslator.translateResponse(msearchResponse.getResponses(), reduceContextBuilder); + return RollupResponseTranslator.translateResponse(msearchResponse, reduceContextBuilder); } throw new RuntimeException("MSearch response was empty, cannot unroll RollupSearch results"); } diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java index 7e814230a2223..e9f882731521f 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java @@ -43,6 +43,7 @@ import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.Aggregations; @@ -91,56 +92,70 @@ public class RollupResponseTranslationTests extends AggregatorTestCase { public void testLiveFailure() { - MultiSearchResponse.Item[] failure = new MultiSearchResponse.Item[] { - new MultiSearchResponse.Item(null, new RuntimeException("foo")), - new MultiSearchResponse.Item(null, null) }; - - Exception e = expectThrows( - RuntimeException.class, - () -> RollupResponseTranslator.combineResponses(failure, InternalAggregationTestCase.emptyReduceContextBuilder()) + MultiSearchResponse failure = new MultiSearchResponse( + new MultiSearchResponse.Item[] { + new MultiSearchResponse.Item(null, new RuntimeException("foo")), + new MultiSearchResponse.Item(null, null) }, + 0L ); - assertThat(e.getMessage(), equalTo("foo")); + try { + Exception e = expectThrows( + RuntimeException.class, + () -> 
RollupResponseTranslator.combineResponses(failure, InternalAggregationTestCase.emptyReduceContextBuilder()) + ); + assertThat(e.getMessage(), equalTo("foo")); - e = expectThrows( - RuntimeException.class, - () -> RollupResponseTranslator.translateResponse(failure, InternalAggregationTestCase.emptyReduceContextBuilder()) - ); - assertThat(e.getMessage(), equalTo("foo")); + e = expectThrows( + RuntimeException.class, + () -> RollupResponseTranslator.translateResponse(failure, InternalAggregationTestCase.emptyReduceContextBuilder()) + ); + assertThat(e.getMessage(), equalTo("foo")); - e = expectThrows(RuntimeException.class, () -> RollupResponseTranslator.verifyResponse(failure[0])); - assertThat(e.getMessage(), equalTo("foo")); + e = expectThrows(RuntimeException.class, () -> RollupResponseTranslator.verifyResponse(failure.getResponses()[0])); + assertThat(e.getMessage(), equalTo("foo")); + } finally { + failure.decRef(); + } } public void testRollupFailure() { - MultiSearchResponse.Item[] failure = new MultiSearchResponse.Item[] { - new MultiSearchResponse.Item(null, new RuntimeException("rollup failure")) }; - - Exception e = expectThrows( - RuntimeException.class, - () -> RollupResponseTranslator.translateResponse(failure, InternalAggregationTestCase.emptyReduceContextBuilder()) + MultiSearchResponse failure = new MultiSearchResponse( + new MultiSearchResponse.Item[] { new MultiSearchResponse.Item(null, new RuntimeException("rollup failure")) }, + 0L ); - assertThat(e.getMessage(), equalTo("rollup failure")); + try { + Exception e = expectThrows( + RuntimeException.class, + () -> RollupResponseTranslator.translateResponse(failure, InternalAggregationTestCase.emptyReduceContextBuilder()) + ); + assertThat(e.getMessage(), equalTo("rollup failure")); + } finally { + failure.decRef(); + } } public void testLiveMissingRollupMissing() { - MultiSearchResponse.Item[] failure = new MultiSearchResponse.Item[] { - new MultiSearchResponse.Item(null, new IndexNotFoundException("foo")), - new MultiSearchResponse.Item(null, new IndexNotFoundException("foo")) }; - - BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); - ScriptService scriptService = mock(ScriptService.class); - - ResourceNotFoundException e = expectThrows( - ResourceNotFoundException.class, - () -> RollupResponseTranslator.combineResponses(failure, InternalAggregationTestCase.emptyReduceContextBuilder()) - ); - assertThat( - e.getMessage(), - equalTo( - "Index [[foo]] was not found, likely because it was deleted while the request was in-flight. " - + "Rollup does not support partial search results, please try the request again." - ) + MultiSearchResponse failure = new MultiSearchResponse( + new MultiSearchResponse.Item[] { + new MultiSearchResponse.Item(null, new IndexNotFoundException("foo")), + new MultiSearchResponse.Item(null, new IndexNotFoundException("foo")) }, + 0L ); + try { + ResourceNotFoundException e = expectThrows( + ResourceNotFoundException.class, + () -> RollupResponseTranslator.combineResponses(failure, InternalAggregationTestCase.emptyReduceContextBuilder()) + ); + assertThat( + e.getMessage(), + equalTo( + "Index [[foo]] was not found, likely because it was deleted while the request was in-flight. " + + "Rollup does not support partial search results, please try the request again." 
+ ) + ); + } finally { + failure.decRef(); + } } public void testMissingLiveIndex() throws Exception { @@ -175,21 +190,27 @@ public void testMissingLiveIndex() throws Exception { Aggregations mockAggsWithout = InternalAggregations.from(aggTree); when(responseWithout.getAggregations()).thenReturn(mockAggsWithout); - MultiSearchResponse.Item[] msearch = new MultiSearchResponse.Item[] { - new MultiSearchResponse.Item(null, new IndexNotFoundException("foo")), - new MultiSearchResponse.Item(responseWithout, null) }; - - ResourceNotFoundException e = expectThrows( - ResourceNotFoundException.class, - () -> RollupResponseTranslator.combineResponses(msearch, InternalAggregationTestCase.emptyReduceContextBuilder()) - ); - assertThat( - e.getMessage(), - equalTo( - "Index [[foo]] was not found, likely because it was deleted while the request was in-flight. " - + "Rollup does not support partial search results, please try the request again." - ) + MultiSearchResponse msearch = new MultiSearchResponse( + new MultiSearchResponse.Item[] { + new MultiSearchResponse.Item(null, new IndexNotFoundException("foo")), + new MultiSearchResponse.Item(responseWithout, null) }, + 0L ); + try { + ResourceNotFoundException e = expectThrows( + ResourceNotFoundException.class, + () -> RollupResponseTranslator.combineResponses(msearch, InternalAggregationTestCase.emptyReduceContextBuilder()) + ); + assertThat( + e.getMessage(), + equalTo( + "Index [[foo]] was not found, likely because it was deleted while the request was in-flight. " + + "Rollup does not support partial search results, please try the request again." + ) + ); + } finally { + msearch.decRef(); + } } public void testRolledMissingAggs() throws Exception { @@ -198,43 +219,52 @@ public void testRolledMissingAggs() throws Exception { when(responseWithout.getAggregations()).thenReturn(InternalAggregations.EMPTY); - MultiSearchResponse.Item[] msearch = new MultiSearchResponse.Item[] { new MultiSearchResponse.Item(responseWithout, null) }; - - SearchResponse response = RollupResponseTranslator.translateResponse( - msearch, - InternalAggregationTestCase.emptyReduceContextBuilder() + MultiSearchResponse msearch = new MultiSearchResponse( + new MultiSearchResponse.Item[] { new MultiSearchResponse.Item(responseWithout, null) }, + 0L ); try { - assertNotNull(response); - Aggregations responseAggs = response.getAggregations(); - assertThat(responseAggs.asList().size(), equalTo(0)); + SearchResponse response = RollupResponseTranslator.translateResponse( + msearch, + InternalAggregationTestCase.emptyReduceContextBuilder() + ); + try { + assertNotNull(response); + Aggregations responseAggs = response.getAggregations(); + assertThat(responseAggs.asList().size(), equalTo(0)); + } finally { + // this SearchResponse is not a mock, so must be decRef'd + response.decRef(); + } } finally { - // this SearchResponse is not a mock, so must be decRef'd - response.decRef(); + msearch.decRef(); } } public void testMissingRolledIndex() { SearchResponse response = mock(SearchResponse.class); - MultiSearchResponse.Item[] msearch = new MultiSearchResponse.Item[] { - new MultiSearchResponse.Item(response, null), - new MultiSearchResponse.Item(null, new IndexNotFoundException("foo")) }; - - BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); - ScriptService scriptService = mock(ScriptService.class); - - ResourceNotFoundException e = expectThrows( - ResourceNotFoundException.class, - () -> 
RollupResponseTranslator.combineResponses(msearch, InternalAggregationTestCase.emptyReduceContextBuilder()) - ); - assertThat( - e.getMessage(), - equalTo( - "Index [[foo]] was not found, likely because it was deleted while the request was in-flight. " - + "Rollup does not support partial search results, please try the request again." - ) + MultiSearchResponse msearch = new MultiSearchResponse( + new MultiSearchResponse.Item[] { + new MultiSearchResponse.Item(response, null), + new MultiSearchResponse.Item(null, new IndexNotFoundException("foo")) }, + 0L ); + try { + ResourceNotFoundException e = expectThrows( + ResourceNotFoundException.class, + () -> RollupResponseTranslator.combineResponses(msearch, InternalAggregationTestCase.emptyReduceContextBuilder()) + ); + assertThat( + e.getMessage(), + equalTo( + "Index [[foo]] was not found, likely because it was deleted while the request was in-flight. " + + "Rollup does not support partial search results, please try the request again." + ) + ); + } finally { + msearch.decRef(); + } } public void testVerifyNormal() throws Exception { @@ -283,41 +313,50 @@ public void testTranslateRollup() throws Exception { Aggregations mockAggs = InternalAggregations.from(aggTree); when(response.getAggregations()).thenReturn(mockAggs); - MultiSearchResponse.Item item = new MultiSearchResponse.Item(response, null); - - // this is not a mock, so needs to be decRef'd - SearchResponse finalResponse = RollupResponseTranslator.translateResponse( - new MultiSearchResponse.Item[] { item }, - InternalAggregationTestCase.emptyReduceContextBuilder() + MultiSearchResponse multiSearchResponse = new MultiSearchResponse( + new MultiSearchResponse.Item[] { new MultiSearchResponse.Item(response, null) }, + 0L ); try { - assertNotNull(finalResponse); - Aggregations responseAggs = finalResponse.getAggregations(); - assertNotNull(finalResponse); - Avg avg = responseAggs.get("foo"); - assertThat(avg.getValue(), equalTo(5.0)); + // this is not a mock, so needs to be decRef'd + SearchResponse finalResponse = RollupResponseTranslator.translateResponse( + multiSearchResponse, + InternalAggregationTestCase.emptyReduceContextBuilder() + ); + try { + assertNotNull(finalResponse); + Aggregations responseAggs = finalResponse.getAggregations(); + assertNotNull(finalResponse); + Avg avg = responseAggs.get("foo"); + assertThat(avg.getValue(), equalTo(5.0)); + } finally { + finalResponse.decRef(); + } } finally { - finalResponse.decRef(); + multiSearchResponse.decRef(); } } public void testTranslateMissingRollup() { - MultiSearchResponse.Item missing = new MultiSearchResponse.Item(null, new IndexNotFoundException("foo")); - - ResourceNotFoundException e = expectThrows( - ResourceNotFoundException.class, - () -> RollupResponseTranslator.translateResponse( - new MultiSearchResponse.Item[] { missing }, - InternalAggregationTestCase.emptyReduceContextBuilder() - ) - ); - assertThat( - e.getMessage(), - equalTo( - "Index [foo] was not found, likely because it was deleted while the request was in-flight. " - + "Rollup does not support partial search results, please try the request again." 
- ) + MultiSearchResponse missing = new MultiSearchResponse( + new MultiSearchResponse.Item[] { new MultiSearchResponse.Item(null, new IndexNotFoundException("foo")) }, + 0L ); + try { + ResourceNotFoundException e = expectThrows( + ResourceNotFoundException.class, + () -> RollupResponseTranslator.translateResponse(missing, InternalAggregationTestCase.emptyReduceContextBuilder()) + ); + assertThat( + e.getMessage(), + equalTo( + "Index [foo] was not found, likely because it was deleted while the request was in-flight. " + + "Rollup does not support partial search results, please try the request again." + ) + ); + } finally { + missing.decRef(); + } } public void testMissingFilter() { @@ -339,13 +378,16 @@ public void testMissingFilter() { when(responseWithout.getAggregations()).thenReturn(mockAggsWithout); MultiSearchResponse.Item rolledResponse = new MultiSearchResponse.Item(responseWithout, null); - MultiSearchResponse.Item[] msearch = new MultiSearchResponse.Item[] { unrolledResponse, rolledResponse }; - - Exception e = expectThrows( - RuntimeException.class, - () -> RollupResponseTranslator.combineResponses(msearch, InternalAggregationTestCase.emptyReduceContextBuilder()) - ); - assertThat(e.getMessage(), containsString("Expected [bizzbuzz] to be a FilterAggregation")); + MultiSearchResponse msearch = new MultiSearchResponse(new MultiSearchResponse.Item[] { unrolledResponse, rolledResponse }, 0L); + try { + Exception e = expectThrows( + RuntimeException.class, + () -> RollupResponseTranslator.combineResponses(msearch, InternalAggregationTestCase.emptyReduceContextBuilder()) + ); + assertThat(e.getMessage(), containsString("Expected [bizzbuzz] to be a FilterAggregation")); + } finally { + msearch.decRef(); + } } public void testMatchingNameNotFilter() { @@ -366,13 +408,16 @@ public void testMatchingNameNotFilter() { when(responseWithout.getAggregations()).thenReturn(mockAggsWithout); MultiSearchResponse.Item rolledResponse = new MultiSearchResponse.Item(responseWithout, null); - MultiSearchResponse.Item[] msearch = new MultiSearchResponse.Item[] { unrolledResponse, rolledResponse }; - - Exception e = expectThrows( - RuntimeException.class, - () -> RollupResponseTranslator.combineResponses(msearch, InternalAggregationTestCase.emptyReduceContextBuilder()) - ); - assertThat(e.getMessage(), equalTo("Expected [filter_foo] to be a FilterAggregation, but was [Max]")); + MultiSearchResponse msearch = new MultiSearchResponse(new MultiSearchResponse.Item[] { unrolledResponse, rolledResponse }, 0L); + try { + Exception e = expectThrows( + RuntimeException.class, + () -> RollupResponseTranslator.combineResponses(msearch, InternalAggregationTestCase.emptyReduceContextBuilder()) + ); + assertThat(e.getMessage(), equalTo("Expected [filter_foo] to be a FilterAggregation, but was [Max]")); + } finally { + msearch.decRef(); + } } public void testSimpleReduction() throws Exception { @@ -417,24 +462,27 @@ public void testSimpleReduction() throws Exception { when(responseWithout.getAggregations()).thenReturn(mockAggsWithout); MultiSearchResponse.Item rolledResponse = new MultiSearchResponse.Item(responseWithout, null); - MultiSearchResponse.Item[] msearch = new MultiSearchResponse.Item[] { unrolledResponse, rolledResponse }; - - // this SearchResponse is not a mock, so needs a decRef - SearchResponse response = RollupResponseTranslator.combineResponses( - msearch, - InternalAggregationTestCase.emptyReduceContextBuilder( - new AggregatorFactories.Builder().addAggregator(new MaxAggregationBuilder("foo")) - 
.addAggregator(new MaxAggregationBuilder("foo." + RollupField.COUNT_FIELD)) - ) - ); + MultiSearchResponse msearch = new MultiSearchResponse(new MultiSearchResponse.Item[] { unrolledResponse, rolledResponse }, 0L); try { - assertNotNull(response); - Aggregations responseAggs = response.getAggregations(); - assertNotNull(responseAggs); - Avg avg = responseAggs.get("foo"); - assertThat(avg.getValue(), equalTo(5.0)); + // this SearchResponse is not a mock, so needs a decRef + SearchResponse response = RollupResponseTranslator.combineResponses( + msearch, + InternalAggregationTestCase.emptyReduceContextBuilder( + new AggregatorFactories.Builder().addAggregator(new MaxAggregationBuilder("foo")) + .addAggregator(new MaxAggregationBuilder("foo." + RollupField.COUNT_FIELD)) + ) + ); + try { + assertNotNull(response); + Aggregations responseAggs = response.getAggregations(); + assertNotNull(responseAggs); + Avg avg = responseAggs.get("foo"); + assertThat(avg.getValue(), equalTo(5.0)); + } finally { + response.decRef(); + } } finally { - response.decRef(); + msearch.decRef(); } } @@ -515,7 +563,7 @@ public void testMismatch() throws IOException { // TODO SearchResponse.Clusters is not public, using null for now. Should fix upstream. MultiSearchResponse.Item unrolledItem = new MultiSearchResponse.Item( new SearchResponse( - null, + SearchHits.EMPTY_WITH_TOTAL_HITS, InternalAggregations.from(Collections.singletonList(responses.get(0))), null, false, @@ -534,7 +582,7 @@ public void testMismatch() throws IOException { ); MultiSearchResponse.Item rolledItem = new MultiSearchResponse.Item( new SearchResponse( - null, + SearchHits.EMPTY_WITH_TOTAL_HITS, InternalAggregations.from(Collections.singletonList(responses.get(1))), null, false, @@ -552,14 +600,17 @@ public void testMismatch() throws IOException { null ); - MultiSearchResponse.Item[] msearch = new MultiSearchResponse.Item[] { unrolledItem, rolledItem }; - - ClassCastException e = expectThrows( - ClassCastException.class, - () -> RollupResponseTranslator.combineResponses(msearch, InternalAggregationTestCase.emptyReduceContextBuilder()) - ); - assertThat(e.getMessage(), containsString("org.elasticsearch.search.aggregations.metrics.InternalGeoBounds")); - assertThat(e.getMessage(), containsString("org.elasticsearch.search.aggregations.InternalMultiBucketAggregation")); + MultiSearchResponse msearch = new MultiSearchResponse(new MultiSearchResponse.Item[] { unrolledItem, rolledItem }, 0); + try { + ClassCastException e = expectThrows( + ClassCastException.class, + () -> RollupResponseTranslator.combineResponses(msearch, InternalAggregationTestCase.emptyReduceContextBuilder()) + ); + assertThat(e.getMessage(), containsString("org.elasticsearch.search.aggregations.metrics.InternalGeoBounds")); + assertThat(e.getMessage(), containsString("org.elasticsearch.search.aggregations.InternalMultiBucketAggregation")); + } finally { + msearch.decRef(); + } } public void testDateHisto() throws IOException { diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java index 1e6a4794b14ae..e2cb5a5bc61b0 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java @@ -43,6 +43,7 @@ import org.elasticsearch.index.query.SearchExecutionContext; import 
org.elasticsearch.index.query.SearchExecutionContextHelper; import org.elasticsearch.script.ScriptCompiler; +import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; @@ -868,7 +869,7 @@ protected void doNextSearch(long waitTimeInNanos, ActionListener ActionListener.respondAndRelease( listener, new SearchResponse( - null, + SearchHits.EMPTY_WITH_TOTAL_HITS, new Aggregations(Collections.singletonList(result)), null, false, From 312d4c2fa172ae8181ea2e37d09c96d051d110a8 Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 10 Jan 2024 13:54:07 +0000 Subject: [PATCH 05/75] Mention `IndexFormatToo{Old,New}Exception` as corruption (#104204) If a file header is corrupted then the exception may be reported as a bad index format version rather than a checksum mismatch. This commit adjusts the docs to cover this case. --- .../troubleshooting/corruption-issues.asciidoc | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/docs/reference/troubleshooting/corruption-issues.asciidoc b/docs/reference/troubleshooting/corruption-issues.asciidoc index 4a245daba0904..15897fe8fb3bb 100644 --- a/docs/reference/troubleshooting/corruption-issues.asciidoc +++ b/docs/reference/troubleshooting/corruption-issues.asciidoc @@ -38,6 +38,13 @@ well-tested, so you can be very confident that a checksum mismatch really does indicate that the data read from disk is different from the data that {es} previously wrote. +If a file header is corrupted then it's possible that {es} might not be able +to work out how to even start reading the file which can lead to an exception +such as: + +- `org.apache.lucene.index.IndexFormatTooOldException` +- `org.apache.lucene.index.IndexFormatTooNewException` + It is also possible that {es} reports a corruption if a file it needs is entirely missing, with an exception such as: @@ -50,8 +57,7 @@ system previously confirmed to {es} that this file was durably synced to disk. On Linux this means that the `fsync()` system call returned successfully. {es} sometimes reports that an index is corrupt because a file needed for recovery is missing, or it exists but has been truncated or is missing its footer. This -indicates that your storage system acknowledges durable writes incorrectly or -that some external process has modified the data {es} previously wrote to disk. +may indicate that your storage system acknowledges durable writes incorrectly. There are many possible explanations for {es} detecting corruption in your cluster. 
Databases like {es} generate a challenging I/O workload that may find From 9ca3be0ea745b1a0e47e15a48fa2c41155466977 Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Wed, 10 Jan 2024 09:02:28 -0500 Subject: [PATCH 06/75] [ML] Adding internal inference input type field (#104153) * Adding input type * Working tests --- .../org/elasticsearch/TransportVersions.java | 1 + .../elasticsearch/inference/InputType.java | 40 +++++++++++++ .../inference/action/InferenceAction.java | 24 ++++++-- .../ml/action/CoordinatedInferenceAction.java | 19 +++++- ...oordinatedInferenceActionRequestTests.java | 53 ++++++++++++++++- .../action/InferModelActionRequestTests.java | 47 ++++++++------- .../action/InferenceActionRequestTests.java | 58 ++++++++++++++++--- .../TransportCoordinatedInferenceAction.java | 25 +++++++- ...nsportCoordinatedInferenceActionTests.java | 37 ++++++++++++ 9 files changed, 265 insertions(+), 39 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/inference/InputType.java create mode 100644 x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceActionTests.java diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 76fd9d077e2e7..f289a7a3c89a1 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -181,6 +181,7 @@ static TransportVersion def(int id) { public static final TransportVersion LAZY_ROLLOVER_ADDED = def(8_569_00_0); public static final TransportVersion ESQL_PLAN_POINT_LITERAL_WKB = def(8_570_00_0); public static final TransportVersion HOT_THREADS_AS_BYTES = def(8_571_00_0); + public static final TransportVersion ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED = def(8_572_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/inference/InputType.java b/server/src/main/java/org/elasticsearch/inference/InputType.java new file mode 100644 index 0000000000000..f8bbea4ae121f --- /dev/null +++ b/server/src/main/java/org/elasticsearch/inference/InputType.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.inference; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; + +import java.io.IOException; +import java.util.Locale; + +/** + * Defines the type of request, whether the request is to ingest a document or search for a document. 
+ */ +public enum InputType implements Writeable { + INGEST, + SEARCH; + + public static String NAME = "input_type"; + + @Override + public String toString() { + return name().toLowerCase(Locale.ROOT); + } + + public static InputType fromStream(StreamInput in) throws IOException { + return in.readEnum(InputType.class); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeEnum(this); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java index a1eabb682c98f..732bc3d66bedc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ObjectParser; @@ -66,12 +67,14 @@ public static Request parseRequest(String modelId, String taskType, XContentPars private final String modelId; private final List input; private final Map taskSettings; + private final InputType inputType; - public Request(TaskType taskType, String modelId, List input, Map taskSettings) { + public Request(TaskType taskType, String modelId, List input, Map taskSettings, InputType inputType) { this.taskType = taskType; this.modelId = modelId; this.input = input; this.taskSettings = taskSettings; + this.inputType = inputType; } public Request(StreamInput in) throws IOException { @@ -84,6 +87,11 @@ public Request(StreamInput in) throws IOException { this.input = List.of(in.readString()); } this.taskSettings = in.readGenericMap(); + if (in.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED)) { + this.inputType = InputType.fromStream(in); + } else { + this.inputType = InputType.INGEST; + } } public TaskType getTaskType() { @@ -102,6 +110,10 @@ public Map getTaskSettings() { return taskSettings; } + public InputType getInputType() { + return inputType; + } + @Override public ActionRequestValidationException validate() { if (input == null) { @@ -128,6 +140,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(input.get(0)); } out.writeGenericMap(taskSettings); + if (out.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED)) { + inputType.writeTo(out); + } } @Override @@ -138,12 +153,13 @@ public boolean equals(Object o) { return taskType == request.taskType && Objects.equals(modelId, request.modelId) && Objects.equals(input, request.input) - && Objects.equals(taskSettings, request.taskSettings); + && Objects.equals(taskSettings, request.taskSettings) + && Objects.equals(inputType, request.inputType); } @Override public int hashCode() { - return Objects.hash(taskType, modelId, input, taskSettings); + return Objects.hash(taskType, modelId, input, taskSettings, inputType); } public static class Builder { @@ -181,7 +197,7 @@ public Builder setTaskSettings(Map taskSettings) { } public Request build() { - return new Request(taskType, modelId, input, taskSettings); + return new Request(taskType, modelId, input, taskSettings, InputType.INGEST); } 
} } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceAction.java index 03270e0dda0f7..7af3d1a150ac8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceAction.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ml.action; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; @@ -98,7 +99,8 @@ public static Request forMapInput( // DFA models only private final List> objectsToInfer; - private Request( + // default for testing + Request( String modelId, @Nullable List inputs, @Nullable Map taskSettings, @@ -131,6 +133,12 @@ public Request(StreamInput in) throws IOException { this.previouslyLicensed = in.readOptionalBoolean(); this.inferenceTimeout = in.readOptionalTimeValue(); this.highPriority = in.readBoolean(); + // The prefixType was added prior to TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED but we're serializing it now + // as a safety measure. At the time of writing this it doesn't have to be serialized because this class is only used internally + // and on a single node so it never actually gets serialized. But we'll do it just in case that changes in the future. + if (in.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED)) { + this.prefixType = in.readEnum(TrainedModelPrefixStrings.PrefixType.class); + } } public String getModelId() { @@ -201,6 +209,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalBoolean(previouslyLicensed); out.writeOptionalTimeValue(inferenceTimeout); out.writeBoolean(highPriority); + if (out.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED)) { + out.writeEnum(prefixType); + } } @Override @@ -221,7 +232,8 @@ public boolean equals(Object o) { && Objects.equals(inferenceConfigUpdate, request.inferenceConfigUpdate) && Objects.equals(previouslyLicensed, request.previouslyLicensed) && Objects.equals(inferenceTimeout, request.inferenceTimeout) - && Objects.equals(highPriority, request.highPriority); + && Objects.equals(highPriority, request.highPriority) + && Objects.equals(prefixType, request.prefixType); } @Override @@ -235,7 +247,8 @@ public int hashCode() { inferenceConfigUpdate, previouslyLicensed, inferenceTimeout, - highPriority + highPriority, + prefixType ); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceActionRequestTests.java index bd8e0ad96f21a..8066dcd4993b9 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceActionRequestTests.java @@ -7,11 +7,13 @@ package org.elasticsearch.xpack.core.ml.action; +import org.elasticsearch.TransportVersion; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.TimeValue; -import 
org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import java.io.IOException; import java.util.ArrayList; @@ -21,7 +23,30 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -public class CoordinatedInferenceActionRequestTests extends AbstractWireSerializingTestCase { +import static org.elasticsearch.TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED; +import static org.elasticsearch.TransportVersions.UPDATE_API_KEY_EXPIRATION_TIME_ADDED; +import static org.hamcrest.Matchers.is; + +public class CoordinatedInferenceActionRequestTests extends AbstractBWCWireSerializationTestCase { + public void testSerializesPrefixType_WhenTransportVersionIs_InputTypeAdded() throws IOException { + var instance = createTestInstance(); + instance.setPrefixType(TrainedModelPrefixStrings.PrefixType.INGEST); + var copy = copyWriteable(instance, getNamedWriteableRegistry(), instanceReader(), ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED); + assertOnBWCObject(copy, instance, ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED); + assertThat(copy.getPrefixType(), is(TrainedModelPrefixStrings.PrefixType.INGEST)); + } + + public void testSerializesPrefixType_DoesNotSerialize_WhenTransportVersion_IsPriorToInputTypeAdded() throws IOException { + var instance = createTestInstance(); + instance.setPrefixType(TrainedModelPrefixStrings.PrefixType.INGEST); + var copy = copyWriteable(instance, getNamedWriteableRegistry(), instanceReader(), UPDATE_API_KEY_EXPIRATION_TIME_ADDED); + + assertNotSame(copy, instance); + assertNotEquals(copy, instance); + assertNotEquals(copy.hashCode(), instance.hashCode()); + assertThat(copy.getPrefixType(), is(TrainedModelPrefixStrings.PrefixType.NONE)); + } + @Override protected NamedWriteableRegistry getNamedWriteableRegistry() { List entries = new ArrayList<>(); @@ -51,6 +76,7 @@ protected CoordinatedInferenceAction.Request createTestInstance() { inferenceTimeout ); request.setHighPriority(highPriority); + request.setPrefixType(randomFrom(TrainedModelPrefixStrings.PrefixType.values())); yield request; } case 1 -> { @@ -69,6 +95,7 @@ protected CoordinatedInferenceAction.Request createTestInstance() { modelType ); request.setHighPriority(highPriority); + request.setPrefixType(randomFrom(TrainedModelPrefixStrings.PrefixType.values())); yield request; } default -> throw new UnsupportedOperationException(); @@ -85,4 +112,26 @@ private static Map randomMap() { protected CoordinatedInferenceAction.Request mutateInstance(CoordinatedInferenceAction.Request instance) throws IOException { return null; } + + @Override + protected CoordinatedInferenceAction.Request mutateInstanceForVersion( + CoordinatedInferenceAction.Request instance, + TransportVersion version + ) { + if (version.before(ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED)) { + instance.setPrefixType(TrainedModelPrefixStrings.PrefixType.NONE); + } + + return new CoordinatedInferenceAction.Request( + instance.getModelId(), + instance.getInputs(), + instance.getTaskSettings(), + instance.getObjectsToInfer(), + InferModelActionRequestTests.mutateInferenceConfigUpdate(instance.getInferenceConfigUpdate(), version), + instance.getPreviouslyLicensed(), + instance.getInferenceTimeout(), + instance.getHighPriority(), + instance.getRequestModelType() + ); + } } diff --git 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferModelActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferModelActionRequestTests.java index b33b64ccf69d7..4385867285592 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferModelActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferModelActionRequestTests.java @@ -146,28 +146,8 @@ public static InferenceConfigUpdate randomInferenceConfigUpdate() { ); } - private static Map randomMap() { - return Stream.generate(() -> randomAlphaOfLength(10)) - .limit(randomInt(10)) - .collect(Collectors.toMap(Function.identity(), (v) -> randomAlphaOfLength(10))); - } - - @Override - protected Writeable.Reader instanceReader() { - return Request::new; - } - - @Override - protected NamedWriteableRegistry getNamedWriteableRegistry() { - List entries = new ArrayList<>(); - entries.addAll(new MlInferenceNamedXContentProvider().getNamedWriteables()); - return new NamedWriteableRegistry(entries); - } - - @Override - protected Request mutateInstanceForVersion(Request instance, TransportVersion version) { + public static InferenceConfigUpdate mutateInferenceConfigUpdate(InferenceConfigUpdate currentUpdate, TransportVersion version) { InferenceConfigUpdate adjustedUpdate; - InferenceConfigUpdate currentUpdate = instance.getUpdate(); if (currentUpdate instanceof NlpConfigUpdate nlpConfigUpdate) { if (nlpConfigUpdate instanceof TextClassificationConfigUpdate update) { adjustedUpdate = TextClassificationConfigUpdateTests.mutateForVersion(update, version); @@ -192,6 +172,31 @@ protected Request mutateInstanceForVersion(Request instance, TransportVersion ve adjustedUpdate = currentUpdate; } + return adjustedUpdate; + } + + private static Map randomMap() { + return Stream.generate(() -> randomAlphaOfLength(10)) + .limit(randomInt(10)) + .collect(Collectors.toMap(Function.identity(), (v) -> randomAlphaOfLength(10))); + } + + @Override + protected Writeable.Reader instanceReader() { + return Request::new; + } + + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + List entries = new ArrayList<>(); + entries.addAll(new MlInferenceNamedXContentProvider().getNamedWriteables()); + return new NamedWriteableRegistry(entries); + } + + @Override + protected Request mutateInstanceForVersion(Request instance, TransportVersion version) { + InferenceConfigUpdate adjustedUpdate = mutateInferenceConfigUpdate(instance.getUpdate(), version); + if (version.before(TransportVersions.V_8_3_0)) { return new Request( instance.getId(), diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionRequestTests.java index aa540694ba564..ee7bfc96c1370 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionRequestTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Tuple; +import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xcontent.json.JsonXContent; @@ -18,6 +19,7 @@ import java.util.ArrayList; import 
java.util.HashMap; +import static org.hamcrest.Matchers.is; import static org.hamcrest.collection.IsIterableContainingInOrder.contains; public class InferenceActionRequestTests extends AbstractWireSerializingTestCase { @@ -33,7 +35,8 @@ protected InferenceAction.Request createTestInstance() { randomFrom(TaskType.values()), randomAlphaOfLength(6), randomList(1, 5, () -> randomAlphaOfLength(8)), - randomMap(0, 3, () -> new Tuple<>(randomAlphaOfLength(4), randomAlphaOfLength(4))) + randomMap(0, 3, () -> new Tuple<>(randomAlphaOfLength(4), randomAlphaOfLength(4))), + randomFrom(InputType.values()) ); } @@ -59,24 +62,49 @@ public void testParsing() throws IOException { } } + public void testParseRequest_DefaultsInputTypeToIngest() throws IOException { + String singleInputRequest = """ + { + "input": "single text input" + } + """; + try (var parser = createParser(JsonXContent.jsonXContent, singleInputRequest)) { + var request = InferenceAction.Request.parseRequest("model_id", "sparse_embedding", parser); + assertThat(request.getInputType(), is(InputType.INGEST)); + } + } + @Override protected InferenceAction.Request mutateInstance(InferenceAction.Request instance) throws IOException { - int select = randomIntBetween(0, 3); + int select = randomIntBetween(0, 4); return switch (select) { case 0 -> { var nextTask = TaskType.values()[(instance.getTaskType().ordinal() + 1) % TaskType.values().length]; - yield new InferenceAction.Request(nextTask, instance.getModelId(), instance.getInput(), instance.getTaskSettings()); + yield new InferenceAction.Request( + nextTask, + instance.getModelId(), + instance.getInput(), + instance.getTaskSettings(), + instance.getInputType() + ); } case 1 -> new InferenceAction.Request( instance.getTaskType(), instance.getModelId() + "foo", instance.getInput(), - instance.getTaskSettings() + instance.getTaskSettings(), + instance.getInputType() ); case 2 -> { var changedInputs = new ArrayList(instance.getInput()); changedInputs.add("bar"); - yield new InferenceAction.Request(instance.getTaskType(), instance.getModelId(), changedInputs, instance.getTaskSettings()); + yield new InferenceAction.Request( + instance.getTaskType(), + instance.getModelId(), + changedInputs, + instance.getTaskSettings(), + instance.getInputType() + ); } case 3 -> { var taskSettings = new HashMap<>(instance.getTaskSettings()); @@ -86,11 +114,25 @@ protected InferenceAction.Request mutateInstance(InferenceAction.Request instanc var keyToRemove = taskSettings.keySet().iterator().next(); taskSettings.remove(keyToRemove); } - yield new InferenceAction.Request(instance.getTaskType(), instance.getModelId(), instance.getInput(), taskSettings); + yield new InferenceAction.Request( + instance.getTaskType(), + instance.getModelId(), + instance.getInput(), + taskSettings, + instance.getInputType() + ); } - default -> { - throw new UnsupportedOperationException(); + case 4 -> { + var nextInputType = InputType.values()[(instance.getInputType().ordinal() + 1) % InputType.values().length]; + yield new InferenceAction.Request( + instance.getTaskType(), + instance.getModelId(), + instance.getInput(), + instance.getTaskSettings(), + nextInputType + ); } + default -> throw new UnsupportedOperationException(); }; } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceAction.java index 9c368c1a162a8..710ffe14b31e2 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceAction.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; @@ -28,11 +29,13 @@ import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.ml.action.CoordinatedInferenceAction; import org.elasticsearch.xpack.core.ml.action.InferModelAction; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentUtils; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.EmptyConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigUpdate; import java.util.ArrayList; +import java.util.Map; import java.util.function.Supplier; import static org.elasticsearch.xpack.core.ClientHelper.INFERENCE_ORIGIN; @@ -43,6 +46,13 @@ public class TransportCoordinatedInferenceAction extends HandledTransportAction< CoordinatedInferenceAction.Request, InferModelAction.Response> { + private static final Map PREFIX_TYPE_INPUT_TYPE_MAP = Map.of( + TrainedModelPrefixStrings.PrefixType.INGEST, + InputType.INGEST, + TrainedModelPrefixStrings.PrefixType.SEARCH, + InputType.SEARCH + ); + private final Client client; private final ClusterService clusterService; @@ -103,15 +113,28 @@ private void forNlp(CoordinatedInferenceAction.Request request, ActionListener listener) { + var inputType = convertPrefixToInputType(request.getPrefixType()); + executeAsyncWithOrigin( client, INFERENCE_ORIGIN, InferenceAction.INSTANCE, - new InferenceAction.Request(TaskType.ANY, request.getModelId(), request.getInputs(), request.getTaskSettings()), + new InferenceAction.Request(TaskType.ANY, request.getModelId(), request.getInputs(), request.getTaskSettings(), inputType), listener.delegateFailureAndWrap((l, r) -> l.onResponse(translateInferenceServiceResponse(r.getResults()))) ); } + // default for testing + static InputType convertPrefixToInputType(TrainedModelPrefixStrings.PrefixType prefixType) { + var inputType = PREFIX_TYPE_INPUT_TYPE_MAP.get(prefixType); + + if (inputType == null) { + return InputType.INGEST; + } + + return inputType; + } + private void doInClusterModel(CoordinatedInferenceAction.Request request, ActionListener listener) { var inferModelRequest = translateRequest(request); executeAsyncWithOrigin(client, ML_ORIGIN, InferModelAction.INSTANCE, inferModelRequest, listener); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceActionTests.java new file mode 100644 index 0000000000000..fa8612021b0c2 --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceActionTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.inference.InputType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; + +import static org.hamcrest.Matchers.is; + +public class TransportCoordinatedInferenceActionTests extends ESTestCase { + public void testConvertPrefixToInputType_ConvertsIngestCorrectly() { + assertThat( + TransportCoordinatedInferenceAction.convertPrefixToInputType(TrainedModelPrefixStrings.PrefixType.INGEST), + is(InputType.INGEST) + ); + } + + public void testConvertPrefixToInputType_ConvertsSearchCorrectly() { + assertThat( + TransportCoordinatedInferenceAction.convertPrefixToInputType(TrainedModelPrefixStrings.PrefixType.SEARCH), + is(InputType.SEARCH) + ); + } + + public void testConvertPrefixToInputType_DefaultsToIngestWhenUnknown() { + assertThat( + TransportCoordinatedInferenceAction.convertPrefixToInputType(TrainedModelPrefixStrings.PrefixType.NONE), + is(InputType.INGEST) + ); + } +} From 55b7a39166115e7bec757884d292ce6a03b7c10d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Przemys=C5=82aw=20Witek?= Date: Wed, 10 Jan 2024 15:25:49 +0100 Subject: [PATCH 07/75] Integration test for chaining transforms (#104058) --- .../integration/TransformChainIT.java | 165 ++++++++++++++++++ .../integration/TransformRestTestCase.java | 31 +++- 2 files changed, 187 insertions(+), 9 deletions(-) create mode 100644 x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformChainIT.java diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformChainIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformChainIT.java new file mode 100644 index 0000000000000..450238b95e26e --- /dev/null +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformChainIT.java @@ -0,0 +1,165 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.transform.integration; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.rest.action.admin.indices.RestPutIndexTemplateAction; +import org.elasticsearch.threadpool.TestThreadPool; +import org.junit.After; +import org.junit.Before; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; + +public class TransformChainIT extends TransformRestTestCase { + + private static final String DEST_INDEX_TEMPLATE = """ + { + "index_patterns": [ "my-transform-*-dest" ], + "mappings": { + "properties": { + "timestamp": { + "type": "date" + }, + "user_id": { + "type": "keyword" + }, + "stars": { + "type": "integer" + } + } + } + }"""; + + private static final String TRANSFORM_CONFIG_TEMPLATE = """ + { + "source": { + "index": "%s" + }, + "dest": { + "index": "%s" + }, + "sync": { + "time": { + "field": "timestamp" + } + }, + "frequency": "%s", + "pivot": { + "group_by": { + "timestamp": { + "date_histogram": { + "field": "timestamp", + "fixed_interval": "%s" + } + }, + "user_id": { + "terms": { + "field": "user_id" + } + } + }, + "aggregations": { + "stars": { + "sum": { + "field": "stars" + } + } + } + }, + "settings": { + "unattended": true, + "deduce_mappings": %s + } + }"""; + + private TestThreadPool threadPool; + + @Before + public void createThreadPool() { + threadPool = new TestThreadPool(getTestName()); + } + + @After + public void shutdownThreadPool() { + if (threadPool != null) { + threadPool.shutdown(); + } + } + + public void testChainedTransforms() throws Exception { + String reviewsIndexName = "reviews"; + final int numDocs = 100; + createReviewsIndex(reviewsIndexName, numDocs, 100, TransformIT::getUserIdForRow, TransformIT::getDateStringForRow); + + // Create destination index template. It will be used by all the transforms in this test. + Request createIndexTemplateRequest = new Request("PUT", "_template/test_dest_index_template"); + createIndexTemplateRequest.setJsonEntity(DEST_INDEX_TEMPLATE); + createIndexTemplateRequest.setOptions(expectWarnings(RestPutIndexTemplateAction.DEPRECATION_WARNING)); + assertAcknowledged(client().performRequest(createIndexTemplateRequest)); + + final int numberOfTransforms = 3; + List transformIds = new ArrayList<>(numberOfTransforms); + // Create the chain of transforms. Previous transform's destination index becomes next transform's source index. + for (int i = 0; i < numberOfTransforms; ++i) { + String transformId = "my-transform-" + i; + transformIds.add(transformId); + // Set up the transform so that its source index is the destination index of the previous transform in the chain. + // The number of documents is expected to be the same in all the indices. + String sourceIndex = i == 0 ? 
reviewsIndexName : "my-transform-" + (i - 1) + "-dest"; + String destIndex = transformId + "-dest"; + assertFalse(indexExists(destIndex)); + + assertAcknowledged(putTransform(transformId, createTransformConfig(sourceIndex, destIndex), true, RequestOptions.DEFAULT)); + } + + List transformIdsShuffled = new ArrayList<>(transformIds); + Collections.shuffle(transformIdsShuffled, random()); + // Start all the transforms in random order so that sometimes the transform later in the chain needs to wait for its source index + // to become available. + for (String transformId : transformIdsShuffled) { + startTransform(transformId, RequestOptions.DEFAULT); + } + + // Wait for the transforms to finish processing. Since the transforms are continuous, we cannot wait for them to be STOPPED. + // Instead, we wait for the expected number of processed documents. + assertBusy(() -> { + for (String transformId : transformIds) { + Map stats = getTransformStats(transformId); + // Verify that all the documents got processed. + assertThat( + "Stats were: " + stats, + XContentMapValues.extractValue(stats, "stats", "documents_processed"), + is(equalTo(numDocs)) + ); + } + }, 60, TimeUnit.SECONDS); + + // Stop all the transforms. + for (String transformId : transformIds) { + stopTransform(transformId); + } + // Delete all the transforms. + for (String transformId : transformIds) { + deleteTransform(transformId); + } + } + + private static String createTransformConfig(String sourceIndex, String destIndex) { + return Strings.format(TRANSFORM_CONFIG_TEMPLATE, sourceIndex, destIndex, "1s", "1s", randomBoolean()); + } +} diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java index f154b13b32add..184df6e098343 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java @@ -164,15 +164,13 @@ protected void stopTransform(String id, boolean waitForCompletion, @Nullable Tim if (timeout != null) { stopTransformRequest.addParameter(TransformField.TIMEOUT.getPreferredName(), timeout.getStringRep()); } - Map stopTransformResponse = entityAsMap(client().performRequest(stopTransformRequest)); - assertThat(stopTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE)); + assertAcknowledged(client().performRequest(stopTransformRequest)); } protected void startTransform(String id, RequestOptions options) throws IOException { Request startTransformRequest = new Request("POST", TRANSFORM_ENDPOINT + id + "/_start"); startTransformRequest.setOptions(options); - Map startTransformResponse = entityAsMap(client().performRequest(startTransformRequest)); - assertThat(startTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE)); + assertAcknowledged(client().performRequest(startTransformRequest)); } // workaround for https://github.com/elastic/elasticsearch/issues/62204 @@ -221,16 +219,24 @@ protected void deleteTransform(String id, boolean force) throws IOException { assertOK(adminClient().performRequest(request)); } - protected void putTransform(String id, String config, RequestOptions options) throws IOException { + protected Response putTransform(String id, String 
config, RequestOptions options) throws IOException { + return putTransform(id, config, false, options); + } + + protected Response putTransform(String id, String config, boolean deferValidation, RequestOptions options) throws IOException { if (createdTransformIds.contains(id)) { throw new IllegalArgumentException("transform [" + id + "] is already registered"); } - Request put = new Request("PUT", TRANSFORM_ENDPOINT + id); - put.setJsonEntity(config); - put.setOptions(options); - assertOK(client().performRequest(put)); + Request request = new Request("PUT", TRANSFORM_ENDPOINT + id); + request.setJsonEntity(config); + if (deferValidation) { + request.addParameter("defer_validation", "true"); + } + request.setOptions(options); + Response response = assertOK(client().performRequest(request)); createdTransformIds.add(id); + return response; } protected Map previewTransform(String transformConfig, RequestOptions options) throws IOException { @@ -396,7 +402,14 @@ protected TransformConfig.Builder createTransformConfigBuilder( } protected void updateConfig(String id, String update, RequestOptions options) throws Exception { + updateConfig(id, update, false, options); + } + + protected void updateConfig(String id, String update, boolean deferValidation, RequestOptions options) throws Exception { Request updateRequest = new Request("POST", "_transform/" + id + "/_update"); + if (deferValidation) { + updateRequest.addParameter("defer_validation", String.valueOf(deferValidation)); + } updateRequest.setJsonEntity(update); updateRequest.setOptions(options); assertOK(client().performRequest(updateRequest)); From 53db0b33401ad056dbeaaae3e3876488eb0ae817 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Wed, 10 Jan 2024 09:55:46 -0500 Subject: [PATCH 08/75] Ingest geoip processor cache 'no results' from the database (#104092) --- docs/changelog/104092.yaml | 5 +++ .../ingest/geoip/GeoIpCache.java | 33 ++++++++++++++++--- .../ingest/geoip/GeoIpProcessor.java | 2 -- .../ingest/geoip/GeoIpCacheTests.java | 21 ++++++++++++ 4 files changed, 54 insertions(+), 7 deletions(-) create mode 100644 docs/changelog/104092.yaml diff --git a/docs/changelog/104092.yaml b/docs/changelog/104092.yaml new file mode 100644 index 0000000000000..b40637d51765e --- /dev/null +++ b/docs/changelog/104092.yaml @@ -0,0 +1,5 @@ +pr: 104092 +summary: Ingest geoip processor cache 'no results' from the database +area: Ingest Node +type: enhancement +issues: [] diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpCache.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpCache.java index 30c0fcb74833c..ff75325624412 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpCache.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpCache.java @@ -18,12 +18,26 @@ import java.util.function.Function; /** - * The in-memory cache for the geoip data. There should only be 1 instance of this class.. + * The in-memory cache for the geoip data. There should only be 1 instance of this class. * This cache differs from the maxmind's {@link NodeCache} such that this cache stores the deserialized Json objects to avoid the * cost of deserialization for each lookup (cached or not). This comes at slight expense of higher memory usage, but significant * reduction of CPU usage. */ final class GeoIpCache { + + /** + * Internal-only sentinel object for recording that a result from the geoip database was null (i.e. there was no result). 
By caching + * this no-result we can distinguish between something not being in the cache because we haven't searched for that data yet, versus + * something not being in the cache because the data doesn't exist in the database. + */ + // visible for testing + static final AbstractResponse NO_RESULT = new AbstractResponse() { + @Override + public String toString() { + return "AbstractResponse[NO_RESULT]"; + } + }; + private final Cache cache; // package private for testing @@ -40,18 +54,27 @@ T putIfAbsent( String databasePath, Function retrieveFunction ) { - // can't use cache.computeIfAbsent due to the elevated permissions for the jackson (run via the cache loader) CacheKey cacheKey = new CacheKey(ip, databasePath); // intentionally non-locking for simplicity...it's OK if we re-put the same key/value in the cache during a race condition. AbstractResponse response = cache.get(cacheKey); + + // populate the cache for this key, if necessary if (response == null) { response = retrieveFunction.apply(ip); - if (response != null) { - cache.put(cacheKey, response); + // if the response from the database was null, then use the no-result sentinel value + if (response == null) { + response = NO_RESULT; } + // store the result or no-result in the cache + cache.put(cacheKey, response); + } + + if (response == NO_RESULT) { + return null; // the no-result sentinel is an internal detail, don't expose it + } else { + return (T) response; } - return (T) response; } // only useful for testing diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java index 58d55a5a8e6fe..42dddf4c83ef3 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java @@ -172,10 +172,8 @@ private Map getGeoData(GeoIpDatabase geoIpDatabase, String ip) t geoData = retrieveCityGeoData(geoIpDatabase, ipAddress); } else if (databaseType.endsWith(COUNTRY_DB_SUFFIX)) { geoData = retrieveCountryGeoData(geoIpDatabase, ipAddress); - } else if (databaseType.endsWith(ASN_DB_SUFFIX)) { geoData = retrieveAsnGeoData(geoIpDatabase, ipAddress); - } else { throw new ElasticsearchParseException( "Unsupported database type [" + geoIpDatabase.getDatabaseType() + "]", diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpCacheTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpCacheTests.java index 438353be737b9..d049ca3f9bcd0 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpCacheTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpCacheTests.java @@ -13,6 +13,10 @@ import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.test.ESTestCase; +import java.net.InetAddress; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Function; + import static org.mockito.Mockito.mock; public class GeoIpCacheTests extends ESTestCase { @@ -36,6 +40,23 @@ public void testCachesAndEvictsResults() { assertNotSame(response1, cache.get(InetAddresses.forString("127.0.0.1"), "path/to/db")); } + public void testCachesNoResult() { + GeoIpCache cache = new GeoIpCache(1); + final AtomicInteger count = new AtomicInteger(0); + Function countAndReturnNull = (ip) -> { + count.incrementAndGet(); + return null; + }; + + AbstractResponse response = 
cache.putIfAbsent(InetAddresses.forString("127.0.0.1"), "path/to/db", countAndReturnNull); + assertNull(response); + assertNull(cache.putIfAbsent(InetAddresses.forString("127.0.0.1"), "path/to/db", countAndReturnNull)); + assertEquals(1, count.get()); + + // the cached value is not actually *null*, it's the NO_RESULT sentinel + assertSame(GeoIpCache.NO_RESULT, cache.get(InetAddresses.forString("127.0.0.1"), "path/to/db")); + } + public void testCacheKey() { GeoIpCache cache = new GeoIpCache(2); AbstractResponse response1 = mock(AbstractResponse.class); From 8e0de773f92408ed88d74f10f0b3a65ec4d31fde Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Wed, 10 Jan 2024 16:38:43 +0100 Subject: [PATCH 09/75] [Connectors API] Handle initialising connector default values in index service (#103977) --- .../test/entsearch/300_connector_put.yml | 14 +++ .../test/entsearch/305_connector_post.yml | 12 +++ .../test/entsearch/333_connector_check_in.yml | 15 ++- .../application/connector/Connector.java | 23 ++--- .../connector/ConnectorIndexService.java | 97 +++++++++++++++---- .../connector/action/PostConnectorAction.java | 17 +++- .../connector/action/PutConnectorAction.java | 12 ++- .../action/TransportPostConnectorAction.java | 16 +-- .../action/TransportPutConnectorAction.java | 20 +--- ...ransportUpdateConnectorLastSeenAction.java | 5 +- .../UpdateConnectorConfigurationAction.java | 4 +- .../action/UpdateConnectorErrorAction.java | 2 +- .../UpdateConnectorFilteringAction.java | 4 +- .../action/UpdateConnectorLastSeenAction.java | 14 +-- .../UpdateConnectorLastSyncStatsAction.java | 2 +- .../action/UpdateConnectorNameAction.java | 8 +- .../action/UpdateConnectorPipelineAction.java | 4 +- .../UpdateConnectorSchedulingAction.java | 4 +- .../connector/ConnectorIndexServiceTests.java | 58 ++++++++--- .../action/PostConnectorActionTests.java | 47 +++++++++ .../action/PutConnectorActionTests.java | 65 +++++++++++++ 21 files changed, 333 insertions(+), 110 deletions(-) create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PostConnectorActionTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PutConnectorActionTests.java diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/300_connector_put.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/300_connector_put.yml index 77d4f28721525..c7bc5f48a3d89 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/300_connector_put.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/300_connector_put.yml @@ -101,3 +101,17 @@ setup: service_type: super-connector - match: { result: 'updated' } + +--- +'Create Connector - Invalid Index Name': + - do: + catch: "bad_request" + connector.put: + connector_id: test-connector-recreating + body: + index_name: _this-is-invalid-index-name + name: my-connector + language: pl + is_native: false + service_type: super-connector + diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/305_connector_post.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/305_connector_post.yml index 8d0fa14311608..9b7432adf290d 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/305_connector_post.yml +++ 
b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/305_connector_post.yml @@ -76,3 +76,15 @@ setup: - match: { custom_scheduling: {} } - match: { filtering.0.domain: DEFAULT } +--- +'Create Connector - Invalid Index Name': + - do: + catch: "bad_request" + connector.post: + body: + index_name: _this-is-invalid-index-name + name: my-connector + language: pl + is_native: false + service_type: super-connector + diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/333_connector_check_in.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/333_connector_check_in.yml index 042fea7091f43..39b7b2d03e68f 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/333_connector_check_in.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/333_connector_check_in.yml @@ -2,7 +2,7 @@ setup: - skip: version: " - 8.11.99" reason: Introduced in 8.12.0 - + features: is_after - do: connector.put: connector_id: test-connector @@ -25,6 +25,19 @@ setup: connector_id: test-connector - exists: last_seen + - set: { last_seen: last_seen_before_check_in } + + - do: + connector.check_in: + connector_id: test-connector + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - is_after: { last_seen: $last_seen_before_check_in } --- "Connector Check-in Error - Connector doesn't exist": diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java index 74d9be8db0fac..3d598d7e44777 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java @@ -147,12 +147,12 @@ private Connector( ) { this.connectorId = connectorId; this.apiKeyId = apiKeyId; - this.configuration = Objects.requireNonNull(configuration, "[configuration] cannot be null"); - this.customScheduling = Objects.requireNonNull(customScheduling, "[custom_scheduling] cannot be null"); + this.configuration = configuration; + this.customScheduling = customScheduling; this.description = description; this.error = error; this.features = features; - this.filtering = Objects.requireNonNull(filtering, "[filtering] cannot be null"); + this.filtering = filtering; this.indexName = indexName; this.isNative = isNative; this.language = language; @@ -160,9 +160,9 @@ private Connector( this.syncInfo = syncInfo; this.name = name; this.pipeline = pipeline; - this.scheduling = Objects.requireNonNull(scheduling, "[scheduling] cannot be null"); + this.scheduling = scheduling; this.serviceType = serviceType; - this.status = Objects.requireNonNull(status, "[status] cannot be null"); + this.status = status; this.syncCursor = syncCursor; this.syncNow = syncNow; } @@ -549,19 +549,19 @@ public static class Builder { private String description; private String error; private ConnectorFeatures features; - private List filtering = List.of(ConnectorFiltering.getDefaultConnectorFilteringConfig()); + private List filtering; private String indexName; - private boolean isNative = false; + private boolean isNative; private String language; private Instant lastSeen; private ConnectorSyncInfo syncInfo = new ConnectorSyncInfo.Builder().build(); 
private String name; private ConnectorIngestPipeline pipeline; - private ConnectorScheduling scheduling = ConnectorScheduling.getDefaultConnectorScheduling(); + private ConnectorScheduling scheduling; private String serviceType; private ConnectorStatus status = ConnectorStatus.CREATED; private Object syncCursor; - private boolean syncNow = false; + private boolean syncNow; public Builder setConnectorId(String connectorId) { this.connectorId = connectorId; @@ -610,9 +610,6 @@ public Builder setIndexName(String indexName) { public Builder setIsNative(boolean isNative) { this.isNative = isNative; - if (isNative) { - this.status = ConnectorStatus.NEEDS_CONFIGURATION; - } return this; } @@ -632,7 +629,7 @@ public Builder setSyncInfo(ConnectorSyncInfo syncInfo) { } public Builder setName(String name) { - this.name = Objects.requireNonNullElse(name, ""); + this.name = name; return this; } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index 8a1b336bfa1e3..41451c76b90f8 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -32,19 +32,21 @@ import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.application.connector.action.PostConnectorAction; +import org.elasticsearch.xpack.application.connector.action.PutConnectorAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorConfigurationAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorErrorAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; -import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSyncStatsAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorNameAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; +import java.time.Instant; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.function.BiConsumer; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; @@ -67,16 +69,25 @@ public ConnectorIndexService(Client client) { } /** - * Creates or updates the {@link Connector} in the underlying index. + * Creates or updates the {@link Connector} in the underlying index with a specific doc ID. * - * @param docId The ID of the connector. - * @param connector The connector object. + * @param request Request for creating the connector. * @param listener The action listener to invoke on response/failure. 
*/ - public void putConnector(String docId, Connector connector, ActionListener listener) { + public void createConnectorWithDocId(PutConnectorAction.Request request, ActionListener listener) { + + Connector connector = createConnectorWithDefaultValues( + request.getDescription(), + request.getIndexName(), + request.getIsNative(), + request.getLanguage(), + request.getName(), + request.getServiceType() + ); + try { final IndexRequest indexRequest = new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) - .id(docId) + .id(request.getConnectorId()) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .source(connector.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)); clientWithOrigin.index(indexRequest, listener); @@ -86,13 +97,25 @@ public void putConnector(String docId, Connector connector, ActionListener listener) { + public void createConnectorWithAutoGeneratedId( + PostConnectorAction.Request request, + ActionListener listener + ) { + + Connector connector = createConnectorWithDefaultValues( + request.getDescription(), + request.getIndexName(), + request.getIsNative(), + request.getLanguage(), + request.getName(), + request.getServiceType() + ); + try { final IndexRequest indexRequest = new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) @@ -107,6 +130,43 @@ public void postConnector(Connector connector, ActionListener listener) { try { String connectorId = request.getConnectorId(); + final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) .id(connectorId) @@ -315,7 +376,7 @@ public void updateConnectorFiltering(UpdateConnectorFilteringAction.Request requ new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) .id(connectorId) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .source(request.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)) + .source(Map.of(Connector.FILTERING_FIELD.getPreferredName(), request.getFiltering())) ); clientWithOrigin.update( updateRequest, @@ -335,17 +396,16 @@ public void updateConnectorFiltering(UpdateConnectorFilteringAction.Request requ /** * Updates the lastSeen property of a {@link Connector}. * - * @param request The request for updating the connector's lastSeen status. - * @param listener The listener for handling responses, including successful updates or errors. + * @param connectorId The id of the connector object. + * @param listener The listener for handling responses, including successful updates or errors. 
*/ - public void updateConnectorLastSeen(UpdateConnectorLastSeenAction.Request request, ActionListener listener) { + public void checkInConnector(String connectorId, ActionListener listener) { try { - String connectorId = request.getConnectorId(); final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) .id(connectorId) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .source(request.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)) + .source(Map.of(Connector.LAST_SEEN_FIELD.getPreferredName(), Instant.now())) ); clientWithOrigin.update( updateRequest, @@ -405,6 +465,7 @@ public void updateConnectorPipeline(UpdateConnectorPipelineAction.Request reques new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) .id(connectorId) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .source(Map.of(Connector.PIPELINE_FIELD.getPreferredName(), request.getPipeline())) .source(request.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)) ); clientWithOrigin.update( @@ -435,7 +496,7 @@ public void updateConnectorScheduling(UpdateConnectorSchedulingAction.Request re new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) .id(connectorId) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .source(request.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)) + .source(Map.of(Connector.SCHEDULING_FIELD.getPreferredName(), request.getScheduling())) ); clientWithOrigin.update( updateRequest, diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/PostConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/PostConnectorAction.java index 947c2f63d4950..0e9fd56799bde 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/PostConnectorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/PostConnectorAction.java @@ -12,11 +12,14 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; +import org.elasticsearch.cluster.metadata.MetadataCreateIndexService; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; +import org.elasticsearch.indices.InvalidIndexNameException; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; @@ -29,6 +32,7 @@ import java.io.IOException; import java.util.Objects; +import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; @@ -135,7 +139,18 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws @Override public ActionRequestValidationException validate() { - return null; + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(getIndexName())) { + validationException = addValidationError("[index_name] cannot 
be [null] or [\"\"]", validationException); + } + try { + MetadataCreateIndexService.validateIndexOrAliasName(getIndexName(), InvalidIndexNameException::new); + } catch (InvalidIndexNameException e) { + validationException = addValidationError(e.toString(), validationException); + } + + return validationException; } @Override diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/PutConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/PutConnectorAction.java index 592be3a6b37ab..8a4ad5db1dbb0 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/PutConnectorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/PutConnectorAction.java @@ -13,12 +13,14 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.cluster.metadata.MetadataCreateIndexService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; +import org.elasticsearch.indices.InvalidIndexNameException; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; @@ -154,7 +156,15 @@ public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (Strings.isNullOrEmpty(getConnectorId())) { - validationException = addValidationError("connector_id cannot be null or empty", validationException); + validationException = addValidationError("[connector_id] cannot be [null] or [\"\"]", validationException); + } + if (Strings.isNullOrEmpty(getIndexName())) { + validationException = addValidationError("[index_name] cannot be [null] or [\"\"]", validationException); + } + try { + MetadataCreateIndexService.validateIndexOrAliasName(getIndexName(), InvalidIndexNameException::new); + } catch (InvalidIndexNameException e) { + validationException = addValidationError(e.toString(), validationException); } return validationException; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportPostConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportPostConnectorAction.java index 7b66ca81a77f9..3d3592c706941 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportPostConnectorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportPostConnectorAction.java @@ -16,11 +16,8 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.application.connector.Connector; import org.elasticsearch.xpack.application.connector.ConnectorIndexService; -import java.util.Objects; - public class TransportPostConnectorAction extends HandledTransportAction { protected final ConnectorIndexService connectorIndexService; @@ -44,17 +41,6 @@ public TransportPostConnectorAction( @Override protected 
void doExecute(Task task, PostConnectorAction.Request request, ActionListener listener) { - - Boolean isNative = Objects.requireNonNullElse(request.getIsNative(), false); - - Connector connector = new Connector.Builder().setDescription(request.getDescription()) - .setIndexName(request.getIndexName()) - .setIsNative(isNative) - .setLanguage(request.getLanguage()) - .setName(request.getName()) - .setServiceType(request.getServiceType()) - .build(); - - connectorIndexService.postConnector(connector, listener); + connectorIndexService.createConnectorWithAutoGeneratedId(request, listener); } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportPutConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportPutConnectorAction.java index 8f4ac53b03bbd..c8c1dfed059c8 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportPutConnectorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportPutConnectorAction.java @@ -16,11 +16,8 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.application.connector.Connector; import org.elasticsearch.xpack.application.connector.ConnectorIndexService; -import java.util.Objects; - public class TransportPutConnectorAction extends HandledTransportAction { protected final ConnectorIndexService connectorIndexService; @@ -44,21 +41,6 @@ public TransportPutConnectorAction( @Override protected void doExecute(Task task, PutConnectorAction.Request request, ActionListener listener) { - - Boolean isNative = Objects.requireNonNullElse(request.getIsNative(), false); - - Connector connector = new Connector.Builder().setDescription(request.getDescription()) - .setIndexName(request.getIndexName()) - .setIsNative(isNative) - .setLanguage(request.getLanguage()) - .setName(request.getName()) - .setServiceType(request.getServiceType()) - .build(); - - connectorIndexService.putConnector( - request.getConnectorId(), - connector, - listener.map(r -> new PutConnectorAction.Response(r.getResult())) - ); + connectorIndexService.createConnectorWithDocId(request, listener.map(r -> new PutConnectorAction.Response(r.getResult()))); } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorLastSeenAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorLastSeenAction.java index 60c75bce8314a..c1ac90a8b018d 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorLastSeenAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorLastSeenAction.java @@ -47,6 +47,9 @@ protected void doExecute( UpdateConnectorLastSeenAction.Request request, ActionListener listener ) { - connectorIndexService.updateConnectorLastSeen(request, listener.map(r -> new ConnectorUpdateActionResponse(r.getResult()))); + connectorIndexService.checkInConnector( + request.getConnectorId(), + listener.map(r -> new ConnectorUpdateActionResponse(r.getResult())) + ); } } diff --git 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationAction.java index 19e7628746485..e1c41cf90968c 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationAction.java @@ -72,11 +72,11 @@ public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (Strings.isNullOrEmpty(connectorId)) { - validationException = addValidationError("[connector_id] cannot be null or empty.", validationException); + validationException = addValidationError("[connector_id] cannot be [null] or [\"\"].", validationException); } if (Objects.isNull(configuration)) { - validationException = addValidationError("[configuration] cannot be null.", validationException); + validationException = addValidationError("[configuration] cannot be [null].", validationException); } return validationException; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorAction.java index ad2036ecbaf81..0f7a17af5d207 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorAction.java @@ -71,7 +71,7 @@ public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (Strings.isNullOrEmpty(connectorId)) { - validationException = addValidationError("[connector_id] cannot be null or empty.", validationException); + validationException = addValidationError("[connector_id] cannot be [null] or [\"\"].", validationException); } return validationException; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringAction.java index dabb87f2afc22..8085e9399c35a 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringAction.java @@ -70,11 +70,11 @@ public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (Strings.isNullOrEmpty(connectorId)) { - validationException = addValidationError("[connector_id] cannot be null or empty.", validationException); + validationException = addValidationError("[connector_id] cannot be [null] or [\"\"].", validationException); } if (filtering == null) { - validationException = addValidationError("[filtering] cannot be null.", validationException); + validationException = addValidationError("[filtering] cannot be [null].", validationException); } return validationException; diff --git 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenAction.java index bd20513e47033..f287326c08215 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenAction.java @@ -18,7 +18,6 @@ import org.elasticsearch.xpack.application.connector.Connector; import java.io.IOException; -import java.time.Instant; import java.util.Objects; import static org.elasticsearch.action.ValidateActions.addValidationError; @@ -36,17 +35,13 @@ public static class Request extends ActionRequest implements ToXContentObject { private final String connectorId; - private final Instant lastSeen; - public Request(String connectorId) { this.connectorId = connectorId; - this.lastSeen = Instant.now(); } public Request(StreamInput in) throws IOException { super(in); this.connectorId = in.readString(); - this.lastSeen = in.readInstant(); } public String getConnectorId() { @@ -58,7 +53,7 @@ public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (Strings.isNullOrEmpty(connectorId)) { - validationException = addValidationError("[connector_id] cannot be null or empty.", validationException); + validationException = addValidationError("[connector_id] cannot be [null] or [\"\"].", validationException); } return validationException; @@ -68,7 +63,7 @@ public ActionRequestValidationException validate() { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); { - builder.field(Connector.LAST_SEEN_FIELD.getPreferredName(), lastSeen); + builder.field(Connector.ID_FIELD.getPreferredName(), connectorId); } builder.endObject(); return builder; @@ -78,7 +73,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(connectorId); - out.writeInstant(lastSeen); } @Override @@ -86,12 +80,12 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Request request = (Request) o; - return Objects.equals(connectorId, request.connectorId) && Objects.equals(lastSeen, request.lastSeen); + return Objects.equals(connectorId, request.connectorId); } @Override public int hashCode() { - return Objects.hash(connectorId, lastSeen); + return Objects.hash(connectorId); } } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsAction.java index 7d82c28ca4af1..d8bd5beba80f8 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsAction.java @@ -72,7 +72,7 @@ public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (Strings.isNullOrEmpty(connectorId)) { - validationException = 
addValidationError("[connector_id] cannot be null or empty.", validationException); + validationException = addValidationError("[connector_id] cannot be [null] or [\"\"].", validationException); } return validationException; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameAction.java index 6b5c580e396ad..b5dd7d07c77e3 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameAction.java @@ -82,10 +82,10 @@ public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (Strings.isNullOrEmpty(connectorId)) { - validationException = addValidationError("[connector_id] cannot be null or empty.", validationException); + validationException = addValidationError("[connector_id] cannot be [null] or [\"\"].", validationException); } if (Strings.isNullOrEmpty(name)) { - validationException = addValidationError("[name] cannot be null or empty.", validationException); + validationException = addValidationError("[name] cannot be [null] or [\"\"].", validationException); } return validationException; @@ -122,9 +122,7 @@ public static UpdateConnectorNameAction.Request fromXContent(XContentParser pars public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); { - if (name != null) { - builder.field(Connector.NAME_FIELD.getPreferredName(), name); - } + builder.field(Connector.NAME_FIELD.getPreferredName(), name); if (description != null) { builder.field(Connector.DESCRIPTION_FIELD.getPreferredName(), description); } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorPipelineAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorPipelineAction.java index ba5b0e702bf0e..23de40857c446 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorPipelineAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorPipelineAction.java @@ -69,11 +69,11 @@ public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (Strings.isNullOrEmpty(connectorId)) { - validationException = addValidationError("[connector_id] cannot be null or empty.", validationException); + validationException = addValidationError("[connector_id] cannot be [null] or [\"\"].", validationException); } if (Objects.isNull(pipeline)) { - validationException = addValidationError("[pipeline] cannot be null.", validationException); + validationException = addValidationError("[pipeline] cannot be [null].", validationException); } return validationException; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingAction.java index df76e9a09547a..e48dd94b6710b 100644 --- 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingAction.java @@ -69,11 +69,11 @@ public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (Strings.isNullOrEmpty(connectorId)) { - validationException = addValidationError("[connector_id] cannot be null or empty.", validationException); + validationException = addValidationError("[connector_id] cannot be [null] or [\"\"].", validationException); } if (Objects.isNull(scheduling)) { - validationException = addValidationError("[scheduling] cannot be null.", validationException); + validationException = addValidationError("[scheduling] cannot be [null].", validationException); } return validationException; diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java index eedfea13c671b..9a2e0a82895fc 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xpack.application.connector.action.PostConnectorAction; +import org.elasticsearch.xpack.application.connector.action.PutConnectorAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorConfigurationAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorErrorAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; @@ -51,7 +52,7 @@ public void setup() { public void testPutConnector() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); String connectorId = randomUUID(); - DocWriteResponse resp = awaitPutConnector(connectorId, connector); + DocWriteResponse resp = buildRequestAndAwaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); Connector indexedConnector = awaitGetConnector(connectorId); @@ -60,7 +61,7 @@ public void testPutConnector() throws Exception { public void testPostConnector() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); - PostConnectorAction.Response resp = awaitPostConnector(connector); + PostConnectorAction.Response resp = buildRequestAndAwaitPostConnector(connector); Connector indexedConnector = awaitGetConnector(resp.getId()); assertThat(resp.getId(), equalTo(indexedConnector.getConnectorId())); @@ -71,7 +72,7 @@ public void testDeleteConnector() throws Exception { List connectorIds = new ArrayList<>(); for (int i = 0; i < numConnectors; i++) { Connector connector = ConnectorTestUtils.getRandomConnector(); - PostConnectorAction.Response resp = awaitPostConnector(connector); + PostConnectorAction.Response resp = buildRequestAndAwaitPostConnector(connector); connectorIds.add(resp.getId()); } @@ -86,7 +87,7 @@ public void testDeleteConnector() throws Exception { public void testUpdateConnectorConfiguration() throws Exception { Connector connector = 
ConnectorTestUtils.getRandomConnector(); String connectorId = randomUUID(); - DocWriteResponse resp = awaitPutConnector(connectorId, connector); + DocWriteResponse resp = buildRequestAndAwaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); Map connectorConfiguration = connector.getConfiguration() @@ -109,7 +110,7 @@ public void testUpdateConnectorConfiguration() throws Exception { public void testUpdateConnectorPipeline() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); String connectorId = randomUUID(); - DocWriteResponse resp = awaitPutConnector(connectorId, connector); + DocWriteResponse resp = buildRequestAndAwaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); ConnectorIngestPipeline updatedPipeline = new ConnectorIngestPipeline.Builder().setName("test-pipeline") @@ -133,7 +134,7 @@ public void testUpdateConnectorFiltering() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); String connectorId = randomUUID(); - DocWriteResponse resp = awaitPutConnector(connectorId, connector); + DocWriteResponse resp = buildRequestAndAwaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); List filteringList = IntStream.range(0, 10) @@ -155,7 +156,7 @@ public void testUpdateConnectorLastSeen() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); String connectorId = randomUUID(); - DocWriteResponse resp = awaitPutConnector(connectorId, connector); + DocWriteResponse resp = buildRequestAndAwaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); UpdateConnectorLastSeenAction.Request checkInRequest = new UpdateConnectorLastSeenAction.Request(connectorId); @@ -179,7 +180,7 @@ public void testUpdateConnectorLastSyncStats() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); String connectorId = randomUUID(); - DocWriteResponse resp = awaitPutConnector(connectorId, connector); + DocWriteResponse resp = buildRequestAndAwaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); ConnectorSyncInfo syncStats = ConnectorTestUtils.getRandomConnectorSyncInfo(); @@ -198,7 +199,7 @@ public void testUpdateConnectorScheduling() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); String connectorId = randomUUID(); - DocWriteResponse resp = awaitPutConnector(connectorId, connector); + DocWriteResponse resp = buildRequestAndAwaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); ConnectorScheduling updatedScheduling = ConnectorTestUtils.getRandomConnectorScheduling(); @@ -218,7 +219,7 @@ public void testUpdateConnectorScheduling() throws Exception { public void testUpdateConnectorError() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); String connectorId = randomUUID(); - DocWriteResponse resp = awaitPutConnector(connectorId, connector); + DocWriteResponse resp = buildRequestAndAwaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); UpdateConnectorErrorAction.Request updateErrorRequest = new UpdateConnectorErrorAction.Request( @@ 
-236,7 +237,7 @@ public void testUpdateConnectorError() throws Exception { public void testUpdateConnectorNameOrDescription() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); String connectorId = randomUUID(); - DocWriteResponse resp = awaitPutConnector(connectorId, connector); + DocWriteResponse resp = buildRequestAndAwaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); UpdateConnectorNameAction.Request updateNameDescriptionRequest = new UpdateConnectorNameAction.Request( @@ -278,11 +279,24 @@ public void onFailure(Exception e) { return resp.get(); } - private DocWriteResponse awaitPutConnector(String docId, Connector connector) throws Exception { + private DocWriteResponse buildRequestAndAwaitPutConnector(String docId, Connector connector) throws Exception { + PutConnectorAction.Request putConnectorRequest = new PutConnectorAction.Request( + docId, + connector.getDescription(), + connector.getIndexName(), + connector.isNative(), + connector.getLanguage(), + connector.getName(), + connector.getServiceType() + ); + return awaitPutConnector(putConnectorRequest); + } + + private DocWriteResponse awaitPutConnector(PutConnectorAction.Request request) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); final AtomicReference exc = new AtomicReference<>(null); - connectorIndexService.putConnector(docId, connector, new ActionListener<>() { + connectorIndexService.createConnectorWithDocId(request, new ActionListener<>() { @Override public void onResponse(DocWriteResponse indexResponse) { resp.set(indexResponse); @@ -303,11 +317,23 @@ public void onFailure(Exception e) { return resp.get(); } - private PostConnectorAction.Response awaitPostConnector(Connector connector) throws Exception { + private PostConnectorAction.Response buildRequestAndAwaitPostConnector(Connector connector) throws Exception { + PostConnectorAction.Request postConnectorRequest = new PostConnectorAction.Request( + connector.getDescription(), + connector.getIndexName(), + connector.isNative(), + connector.getLanguage(), + connector.getName(), + connector.getServiceType() + ); + return awaitPostConnector(postConnectorRequest); + } + + private PostConnectorAction.Response awaitPostConnector(PostConnectorAction.Request request) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); final AtomicReference exc = new AtomicReference<>(null); - connectorIndexService.postConnector(connector, new ActionListener<>() { + connectorIndexService.createConnectorWithAutoGeneratedId(request, new ActionListener<>() { @Override public void onResponse(PostConnectorAction.Response indexResponse) { resp.set(indexResponse); @@ -435,7 +461,7 @@ private UpdateResponse awaitUpdateConnectorLastSeen(UpdateConnectorLastSeenActio CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); final AtomicReference exc = new AtomicReference<>(null); - connectorIndexService.updateConnectorLastSeen(checkIn, new ActionListener<>() { + connectorIndexService.checkInConnector(checkIn.getConnectorId(), new ActionListener<>() { @Override public void onResponse(UpdateResponse indexResponse) { resp.set(indexResponse); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PostConnectorActionTests.java 
b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PostConnectorActionTests.java new file mode 100644 index 0000000000000..0f0e83f2b9c51 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PostConnectorActionTests.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class PostConnectorActionTests extends ESTestCase { + + public void testValidate_WhenConnectorIdAndIndexNamePresent_ExpectNoValidationError() { + PostConnectorAction.Request request = new PostConnectorAction.Request( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomBoolean(), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10) + ); + ActionRequestValidationException exception = request.validate(); + + assertThat(exception, nullValue()); + } + + public void testValidate_WhenMalformedIndexName_ExpectValidationError() { + PostConnectorAction.Request requestWithMissingConnectorId = new PostConnectorAction.Request( + randomAlphaOfLength(10), + "_illegal-index-name", + randomBoolean(), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10) + ); + ActionRequestValidationException exception = requestWithMissingConnectorId.validate(); + + assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString("Invalid index name [_illegal-index-name]")); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PutConnectorActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PutConnectorActionTests.java new file mode 100644 index 0000000000000..a35c5c7e408f3 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PutConnectorActionTests.java @@ -0,0 +1,65 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class PutConnectorActionTests extends ESTestCase { + + public void testValidate_WhenConnectorIdAndIndexNamePresent_ExpectNoValidationError() { + PutConnectorAction.Request request = new PutConnectorAction.Request( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomBoolean(), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10) + ); + ActionRequestValidationException exception = request.validate(); + + assertThat(exception, nullValue()); + } + + public void testValidate_WhenConnectorIdIsNull_ExpectValidationError() { + PutConnectorAction.Request requestWithMissingConnectorId = new PutConnectorAction.Request( + null, + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomBoolean(), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10) + ); + ActionRequestValidationException exception = requestWithMissingConnectorId.validate(); + + assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString("[connector_id] cannot be [null] or [\"\"]")); + } + + public void testValidate_WhenMalformedIndexName_ExpectValidationError() { + PutConnectorAction.Request requestWithMissingConnectorId = new PutConnectorAction.Request( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + "_illegal-index-name", + randomBoolean(), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10) + ); + ActionRequestValidationException exception = requestWithMissingConnectorId.validate(); + + assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString("Invalid index name [_illegal-index-name]")); + } +} From 79e3a67c78ce3d4a4dffe16ca7c809c325750ecf Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Wed, 10 Jan 2024 17:09:02 +0100 Subject: [PATCH 10/75] Update security QA suite to run in Serverless (#104194) This removes the node count setting from the test cluster configuration and adds the `internal-test-artifact` plugin to the suite's config.
--- x-pack/plugin/esql/qa/security/build.gradle | 2 ++ .../java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java | 3 +-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/esql/qa/security/build.gradle b/x-pack/plugin/esql/qa/security/build.gradle index 33371320b865d..068a4fd8f4989 100644 --- a/x-pack/plugin/esql/qa/security/build.gradle +++ b/x-pack/plugin/esql/qa/security/build.gradle @@ -1,4 +1,6 @@ apply plugin: 'elasticsearch.internal-java-rest-test' +// Necessary to use tests in Serverless +apply plugin: 'elasticsearch.internal-test-artifact' tasks.named('javaRestTest') { usesDefaultDistribution() diff --git a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java index e363fa64c594d..bb8163915c1c4 100644 --- a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java +++ b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java @@ -38,11 +38,10 @@ public class EsqlSecurityIT extends ESRestTestCase { @ClassRule public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) - .nodes(2) .setting("xpack.license.self_generated.type", "trial") .setting("xpack.security.enabled", "true") .rolesFile(Resource.fromClasspath("roles.yml")) - .user("test-admin", "x-pack-test-password", "test-admin", false) + .user("test-admin", "x-pack-test-password", "test-admin", true) .user("user1", "x-pack-test-password", "user1", false) .user("user2", "x-pack-test-password", "user2", false) .user("user3", "x-pack-test-password", "user3", false) From 709c0f5d31a08838c1c96fbb2bacc8fa532c76fe Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Wed, 10 Jan 2024 08:11:23 -0800 Subject: [PATCH 11/75] Re-enable HeapAttackIT (#104107) This PR enables ESQL heap attack tests. I have run this suite over 500 iterations with different hardware configurations for the last two days, and all have been successful with the changes in #104159. Additionally, this PR adds an action that can trigger OOM to generate a heap dump if a test takes more than 5 minutes. I've seen cases (previously with our CI) where the test didn't result in OOM but was taking too long. Having the ability to inspect the heap in such cases would be beneficial. 
Closes #103527 Closes #100678 --- .../apm-integration/build.gradle | 5 -- test/external-modules/build.gradle | 5 -- .../delayed-aggs/build.gradle | 7 +++ .../die-with-dignity/build.gradle | 4 -- .../external-modules/error-query/build.gradle | 7 +++ .../esql-heap-attack/build.gradle | 20 ++++++ .../xpack/esql}/heap_attack/HeapAttackIT.java | 63 ++++++++++++++++--- .../esql/heap_attack/HeapAttackPlugin.java | 45 +++++++++++++ .../RestTriggerOutOfMemoryAction.java | 52 +++++++++++++++ .../esql/qa/server/heap-attack/build.gradle | 11 ---- 10 files changed, 187 insertions(+), 32 deletions(-) create mode 100644 test/external-modules/esql-heap-attack/build.gradle rename {x-pack/plugin/esql/qa/server/heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa => test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql}/heap_attack/HeapAttackIT.java (88%) create mode 100644 test/external-modules/esql-heap-attack/src/main/java/org/elasticsearch/test/esql/heap_attack/HeapAttackPlugin.java create mode 100644 test/external-modules/esql-heap-attack/src/main/java/org/elasticsearch/test/esql/heap_attack/RestTriggerOutOfMemoryAction.java delete mode 100644 x-pack/plugin/esql/qa/server/heap-attack/build.gradle diff --git a/test/external-modules/apm-integration/build.gradle b/test/external-modules/apm-integration/build.gradle index b3df1d622af54..3ae62c72968b3 100644 --- a/test/external-modules/apm-integration/build.gradle +++ b/test/external-modules/apm-integration/build.gradle @@ -16,15 +16,10 @@ tasks.named("test").configure { enabled = false } -tasks.named("yamlRestTest").configure { - enabled = false -} - tasks.named('javaRestTest').configure { it.onlyIf("snapshot build") { BuildParams.isSnapshotBuild() } } - dependencies { clusterModules project(':modules:apm') } diff --git a/test/external-modules/build.gradle b/test/external-modules/build.gradle index b0deae90b40d9..a663006a57236 100644 --- a/test/external-modules/build.gradle +++ b/test/external-modules/build.gradle @@ -3,15 +3,10 @@ import org.elasticsearch.gradle.internal.info.BuildParams subprojects { apply plugin: 'elasticsearch.base-internal-es-plugin' - apply plugin: 'elasticsearch.legacy-yaml-rest-test' esplugin { name it.name licenseFile rootProject.file('licenses/SSPL-1.0+ELASTIC-LICENSE-2.0.txt') noticeFile rootProject.file('NOTICE.txt') } - - tasks.named('yamlRestTest').configure { - it.onlyIf("snapshot build") { BuildParams.isSnapshotBuild() } - } } diff --git a/test/external-modules/delayed-aggs/build.gradle b/test/external-modules/delayed-aggs/build.gradle index 88a1fe5568c66..11461beeaad7d 100644 --- a/test/external-modules/delayed-aggs/build.gradle +++ b/test/external-modules/delayed-aggs/build.gradle @@ -5,6 +5,13 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. 
*/ +import org.elasticsearch.gradle.internal.info.BuildParams + +apply plugin: 'elasticsearch.legacy-yaml-rest-test' + +tasks.named('yamlRestTest').configure { + it.onlyIf("snapshot build") { BuildParams.isSnapshotBuild() } +} esplugin { description 'A test module that allows to delay aggregations on shards with a configurable time' diff --git a/test/external-modules/die-with-dignity/build.gradle b/test/external-modules/die-with-dignity/build.gradle index 999b81af027b3..34a9a71533d3c 100644 --- a/test/external-modules/die-with-dignity/build.gradle +++ b/test/external-modules/die-with-dignity/build.gradle @@ -16,10 +16,6 @@ tasks.named("test").configure { enabled = false } -tasks.named("yamlRestTest").configure { - enabled = false -} - tasks.named('javaRestTest').configure { it.onlyIf("snapshot build") { BuildParams.isSnapshotBuild() } } diff --git a/test/external-modules/error-query/build.gradle b/test/external-modules/error-query/build.gradle index c9b8ab9a4dfd5..328c0e3e20f50 100644 --- a/test/external-modules/error-query/build.gradle +++ b/test/external-modules/error-query/build.gradle @@ -6,6 +6,13 @@ * Side Public License, v 1. */ +import org.elasticsearch.gradle.internal.info.BuildParams +apply plugin: 'elasticsearch.legacy-yaml-rest-test' + +tasks.named('yamlRestTest').configure { + it.onlyIf("snapshot build") { BuildParams.isSnapshotBuild() } +} + esplugin { description 'A test module that exposes a way to simulate search shard failures and warnings' classname 'org.elasticsearch.test.errorquery.ErrorQueryPlugin' diff --git a/test/external-modules/esql-heap-attack/build.gradle b/test/external-modules/esql-heap-attack/build.gradle new file mode 100644 index 0000000000000..b9cead7b2318a --- /dev/null +++ b/test/external-modules/esql-heap-attack/build.gradle @@ -0,0 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +apply plugin: 'elasticsearch.internal-java-rest-test' +// Necessary to use tests in Serverless +apply plugin: 'elasticsearch.internal-test-artifact' + +esplugin { + description 'A test module that can trigger out of memory' + classname 'org.elasticsearch.test.esql.heap_attack.HeapAttackPlugin' +} + +tasks.named('javaRestTest') { + usesDefaultDistribution() +} diff --git a/x-pack/plugin/esql/qa/server/heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/heap_attack/HeapAttackIT.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java similarity index 88% rename from x-pack/plugin/esql/qa/server/heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/heap_attack/HeapAttackIT.java rename to test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java index 0f5dd72feafbb..2418d1104d244 100644 --- a/x-pack/plugin/esql/qa/server/heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/heap_attack/HeapAttackIT.java +++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java @@ -5,27 +5,34 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.esql.qa.heap_attack; +package org.elasticsearch.xpack.esql.heap_attack; +import org.apache.http.HttpEntity; +import org.apache.http.HttpHost; import org.apache.http.client.config.RequestConfig; import org.apache.http.util.EntityUtils; -import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.RestClient; import org.elasticsearch.client.WarningsHandler; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.ListMatcher; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.threadpool.Scheduler; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; -import org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase; import org.junit.After; import org.junit.Before; import org.junit.ClassRule; @@ -51,12 +58,12 @@ * Tests that run ESQL queries that have, in the past, used so much memory they * crash Elasticsearch. */ -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103527") public class HeapAttackIT extends ESRestTestCase { @ClassRule public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) + .module("test-esql-heap-attack") .setting("xpack.security.enabled", "false") .setting("xpack.license.self_generated.type", "trial") .build(); @@ -265,7 +272,6 @@ public void testManyEval() throws IOException { assertMap(map, matchesMap().entry("columns", columns).entry("values", hasSize(10_000))); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/100528") public void testTooManyEval() throws IOException { initManyLongs(); assertCircuitBreaks(() -> manyEval(1000)); @@ -299,7 +305,40 @@ private Response query(String query, String filterPath) throws IOException { .setRequestConfig(RequestConfig.custom().setSocketTimeout(Math.toIntExact(TimeValue.timeValueMinutes(5).millis())).build()) .setWarningsHandler(WarningsHandler.PERMISSIVE) ); - return client().performRequest(request); + logger.info("--> test {} started querying", getTestName()); + final ThreadPool testThreadPool = new TestThreadPool(getTestName()); + final long startedTimeInNanos = System.nanoTime(); + Scheduler.Cancellable schedule = null; + try { + schedule = testThreadPool.schedule(new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + throw new AssertionError(e); + } + + @Override + protected void doRun() throws Exception { + TimeValue elapsed = TimeValue.timeValueNanos(System.nanoTime() - startedTimeInNanos); + logger.info("--> test {} triggering OOM after {}", getTestName(), elapsed); + Request triggerOOM = new Request("POST", "/_trigger_out_of_memory"); + client().performRequest(triggerOOM); + } + }, TimeValue.timeValueMinutes(5), 
testThreadPool.executor(ThreadPool.Names.GENERIC)); + Response resp = client().performRequest(request); + logger.info("--> test {} completed querying", getTestName()); + return resp; + } finally { + if (schedule != null) { + schedule.cancel(); + } + terminate(testThreadPool); + } + } + + @Override + protected RestClient buildClient(Settings settings, HttpHost[] hosts) throws IOException { + settings = Settings.builder().put(settings).put(ESRestTestCase.CLIENT_SOCKET_TIMEOUT, "6m").build(); + return super.buildClient(settings, hosts); } public void testFetchManyBigFields() throws IOException { @@ -510,6 +549,16 @@ private static void assertWriteResponse(Response response) throws IOException { @Before @After public void assertRequestBreakerEmpty() throws Exception { - EsqlSpecTestCase.assertRequestBreakerEmpty(); + assertBusy(() -> { + HttpEntity entity = adminClient().performRequest(new Request("GET", "/_nodes/stats")).getEntity(); + Map stats = XContentHelper.convertToMap(XContentType.JSON.xContent(), entity.getContent(), false); + Map nodes = (Map) stats.get("nodes"); + for (Object n : nodes.values()) { + Map node = (Map) n; + Map breakers = (Map) node.get("breakers"); + Map request = (Map) breakers.get("request"); + assertMap(request, matchesMap().extraOk().entry("estimated_size_in_bytes", 0).entry("estimated_size", "0b")); + } + }); } } diff --git a/test/external-modules/esql-heap-attack/src/main/java/org/elasticsearch/test/esql/heap_attack/HeapAttackPlugin.java b/test/external-modules/esql-heap-attack/src/main/java/org/elasticsearch/test/esql/heap_attack/HeapAttackPlugin.java new file mode 100644 index 0000000000000..a1bf71070f3c8 --- /dev/null +++ b/test/external-modules/esql-heap-attack/src/main/java/org/elasticsearch/test/esql/heap_attack/HeapAttackPlugin.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.test.esql.heap_attack; + +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.plugins.ActionPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestHandler; + +import java.util.List; +import java.util.function.Supplier; + +public class HeapAttackPlugin extends Plugin implements ActionPlugin { + @Override + public List getRestHandlers( + Settings settings, + RestController restController, + ClusterSettings clusterSettings, + IndexScopedSettings indexScopedSettings, + SettingsFilter settingsFilter, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier nodesInCluster + ) { + return List.of(new RestTriggerOutOfMemoryAction()); + } +} diff --git a/test/external-modules/esql-heap-attack/src/main/java/org/elasticsearch/test/esql/heap_attack/RestTriggerOutOfMemoryAction.java b/test/external-modules/esql-heap-attack/src/main/java/org/elasticsearch/test/esql/heap_attack/RestTriggerOutOfMemoryAction.java new file mode 100644 index 0000000000000..d0a146edde765 --- /dev/null +++ b/test/external-modules/esql-heap-attack/src/main/java/org/elasticsearch/test/esql/heap_attack/RestTriggerOutOfMemoryAction.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.test.esql.heap_attack; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; + +import java.util.ArrayList; +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.POST; + +public class RestTriggerOutOfMemoryAction extends BaseRestHandler { + private static final Logger LOGGER = LogManager.getLogger(RestTriggerOutOfMemoryAction.class); + + @Override + public String getName() { + return "trigger_out_of_memory"; + } + + @Override + public List routes() { + return List.of(new Route(POST, "/_trigger_out_of_memory")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { + LOGGER.error("triggering out of memory"); + List values = new ArrayList<>(); + return channel -> { + while (true) { + values.add(new int[1024 * 1024]); + } + }; + } +} diff --git a/x-pack/plugin/esql/qa/server/heap-attack/build.gradle b/x-pack/plugin/esql/qa/server/heap-attack/build.gradle deleted file mode 100644 index 93eae7d3b9d18..0000000000000 --- a/x-pack/plugin/esql/qa/server/heap-attack/build.gradle +++ /dev/null @@ -1,11 +0,0 @@ -apply plugin: 'elasticsearch.internal-java-rest-test' -// Necessary to use tests in Serverless -apply plugin: 'elasticsearch.internal-test-artifact' - -dependencies { - javaRestTestImplementation project(xpackModule('esql:qa:server')) -} - -tasks.named('javaRestTest') { - usesDefaultDistribution() -} From 4b21499f2d50ecc0fd07e36170965f0a55ff6565 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Wed, 10 Jan 2024 17:39:52 +0100 Subject: [PATCH 12/75] Rename some functions in SpatialCoordinateTypes (#104201) --- .../compute/data/BasicBlockTests.java | 6 ++-- .../elasticsearch/xpack/esql/CsvAssert.java | 4 +-- .../xpack/esql/CsvTestUtils.java | 4 +-- .../xpack/esql/action/ColumnInfo.java | 4 +-- .../xpack/esql/action/ResponseValueUtils.java | 8 ++--- .../scalar/convert/ToCartesianPoint.java | 2 +- .../function/scalar/convert/ToGeoPoint.java | 2 +- .../function/scalar/convert/ToString.java | 4 +-- .../xpack/esql/io/stream/PlanNamedTypes.java | 2 +- .../esql/action/EsqlQueryResponseTests.java | 6 ++-- .../function/AbstractFunctionTestCase.java | 4 +-- .../expression/function/TestCaseSupplier.java | 10 ++----- .../scalar/convert/ToCartesianPointTests.java | 6 ++-- .../scalar/convert/ToGeoPointTests.java | 6 ++-- .../scalar/convert/ToStringTests.java | 4 +-- .../AbstractMultivalueFunctionTestCase.java | 4 +-- .../xpack/esql/formatter/TextFormatTests.java | 8 ++--- .../esql/formatter/TextFormatterTests.java | 12 ++++---- .../xpack/ql/util/SpatialCoordinateTypes.java | 30 +++++++++---------- .../ql/util/SpatialCoordinateTypesTests.java | 2 +- 20 files changed, 60 insertions(+), 68 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java index 7681b147824a5..dd775564f12a3 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java @@ -447,11 +447,11 @@ public void testBytesRefBlock() { } public void testBytesRefBlockOnGeoPoints() { - testBytesRefBlock(() -> GEO.pointAsWKB(GeometryTestUtils.randomPoint()), 
false, GEO::wkbAsString); + testBytesRefBlock(() -> GEO.asWkb(GeometryTestUtils.randomPoint()), false, GEO::wkbToWkt); } public void testBytesRefBlockOnCartesianPoints() { - testBytesRefBlock(() -> CARTESIAN.pointAsWKB(ShapeTestUtils.randomPoint()), false, CARTESIAN::wkbAsString); + testBytesRefBlock(() -> CARTESIAN.asWkb(ShapeTestUtils.randomPoint()), false, CARTESIAN::wkbToWkt); } public void testBytesRefBlockBuilderWithNulls() { @@ -930,7 +930,7 @@ public static RandomBlock randomBlock( } case BYTES_REF -> { BytesRef b = bytesRefFromPoints - ? GEO.pointAsWKB(pointSupplier.get()) + ? GEO.asWkb(pointSupplier.get()) : new BytesRef(randomRealisticUnicodeOfLength(4)); valuesAtPosition.add(b); ((BytesRefBlock.Builder) builder).appendBytesRef(b); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java index 49dc585c01753..3968c2f33fca8 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java @@ -202,9 +202,9 @@ public static void assertData( if (expectedType == Type.DATETIME) { expectedValue = rebuildExpected(expectedValue, Long.class, x -> UTC_DATE_TIME_FORMATTER.formatMillis((long) x)); } else if (expectedType == Type.GEO_POINT) { - expectedValue = rebuildExpected(expectedValue, BytesRef.class, x -> GEO.wkbAsString((BytesRef) x)); + expectedValue = rebuildExpected(expectedValue, BytesRef.class, x -> GEO.wkbToWkt((BytesRef) x)); } else if (expectedType == Type.CARTESIAN_POINT) { - expectedValue = rebuildExpected(expectedValue, BytesRef.class, x -> CARTESIAN.wkbAsString((BytesRef) x)); + expectedValue = rebuildExpected(expectedValue, BytesRef.class, x -> CARTESIAN.wkbToWkt((BytesRef) x)); } else if (expectedType == Type.IP) { // convert BytesRef-packed IP to String, allowing subsequent comparison with what's expected expectedValue = rebuildExpected(expectedValue, BytesRef.class, x -> DocValueFormat.IP.format((BytesRef) x)); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java index 919ef66456230..d49d5a964e944 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java @@ -391,8 +391,8 @@ public enum Type { Long.class ), BOOLEAN(Booleans::parseBoolean, Boolean.class), - GEO_POINT(x -> x == null ? null : GEO.stringAsWKB(x), BytesRef.class), - CARTESIAN_POINT(x -> x == null ? null : CARTESIAN.stringAsWKB(x), BytesRef.class); + GEO_POINT(x -> x == null ? null : GEO.wktToWkb(x), BytesRef.class), + CARTESIAN_POINT(x -> x == null ? 
null : CARTESIAN.wktToWkb(x), BytesRef.class); private static final Map LOOKUP = new HashMap<>(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ColumnInfo.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ColumnInfo.java index 673ec0bc4a184..43a16872fd99a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ColumnInfo.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ColumnInfo.java @@ -166,14 +166,14 @@ protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Pa @Override protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) throws IOException { - return builder.value(GEO.wkbAsString(((BytesRefBlock) block).getBytesRef(valueIndex, scratch))); + return builder.value(GEO.wkbToWkt(((BytesRefBlock) block).getBytesRef(valueIndex, scratch))); } }; case "cartesian_point" -> new PositionToXContent(block) { @Override protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) throws IOException { - return builder.value(CARTESIAN.wkbAsString(((BytesRefBlock) block).getBytesRef(valueIndex, scratch))); + return builder.value(CARTESIAN.wkbToWkt(((BytesRefBlock) block).getBytesRef(valueIndex, scratch))); } }; case "boolean" -> new PositionToXContent(block) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java index 625b488b1e857..40bc90d8c5b0c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java @@ -101,8 +101,8 @@ private static Object valueAt(String dataType, Block block, int offset, BytesRef } case "boolean" -> ((BooleanBlock) block).getBoolean(offset); case "version" -> new Version(((BytesRefBlock) block).getBytesRef(offset, scratch)).toString(); - case "geo_point" -> GEO.wkbAsString(((BytesRefBlock) block).getBytesRef(offset, scratch)); - case "cartesian_point" -> CARTESIAN.wkbAsString(((BytesRefBlock) block).getBytesRef(offset, scratch)); + case "geo_point" -> GEO.wkbToWkt(((BytesRefBlock) block).getBytesRef(offset, scratch)); + case "cartesian_point" -> CARTESIAN.wkbToWkt(((BytesRefBlock) block).getBytesRef(offset, scratch)); case "unsupported" -> UnsupportedValueSource.UNSUPPORTED_OUTPUT; case "_source" -> { BytesRef val = ((BytesRefBlock) block).getBytesRef(offset, scratch); @@ -163,12 +163,12 @@ static Page valuesToPage(BlockFactory blockFactory, List columns, Li } case "geo_point" -> { // This just converts WKT to WKB, so does not need CRS knowledge, we could merge GEO and CARTESIAN here - BytesRef wkb = GEO.stringAsWKB(value.toString()); + BytesRef wkb = GEO.wktToWkb(value.toString()); ((BytesRefBlock.Builder) builder).appendBytesRef(wkb); } case "cartesian_point" -> { // This just converts WKT to WKB, so does not need CRS knowledge, we could merge GEO and CARTESIAN here - BytesRef wkb = CARTESIAN.stringAsWKB(value.toString()); + BytesRef wkb = CARTESIAN.wktToWkb(value.toString()); ((BytesRefBlock.Builder) builder).appendBytesRef(wkb); } default -> throw EsqlIllegalArgumentException.illegalDataType(dataTypes.get(c)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java index 3b8bd582571f4..baa999e125f7e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java @@ -59,6 +59,6 @@ protected NodeInfo info() { @ConvertEvaluator(extraName = "FromString", warnExceptions = { IllegalArgumentException.class }) static BytesRef fromKeyword(BytesRef in) { - return CARTESIAN.stringAsWKB(in.utf8ToString()); + return CARTESIAN.wktToWkb(in.utf8ToString()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java index ab265dad6a477..8680d6d7e4a2c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java @@ -59,6 +59,6 @@ protected NodeInfo info() { @ConvertEvaluator(extraName = "FromString", warnExceptions = { IllegalArgumentException.class }) static BytesRef fromKeyword(BytesRef in) { - return GEO.stringAsWKB(in.utf8ToString()); + return GEO.wktToWkb(in.utf8ToString()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java index 26baac4f8bcb6..ea5343c74a105 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java @@ -141,11 +141,11 @@ static BytesRef fromUnsignedLong(long lng) { @ConvertEvaluator(extraName = "FromGeoPoint") static BytesRef fromGeoPoint(BytesRef wkb) { - return new BytesRef(GEO.wkbAsString(wkb)); + return new BytesRef(GEO.wkbToWkt(wkb)); } @ConvertEvaluator(extraName = "FromCartesianPoint") static BytesRef fromCartesianPoint(BytesRef wkb) { - return new BytesRef(CARTESIAN.wkbAsString(wkb)); + return new BytesRef(CARTESIAN.wkbToWkt(wkb)); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index 3a2f8797103aa..e655a60825f3a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -1642,7 +1642,7 @@ private static Object mapToLiteralValue(PlanStreamInput in, DataType dataType, O } private static BytesRef longAsWKB(DataType dataType, long encoded) { - return dataType == GEO_POINT ? GEO.longAsWKB(encoded) : CARTESIAN.longAsWKB(encoded); + return dataType == GEO_POINT ? 
GEO.longAsWkb(encoded) : CARTESIAN.longAsWkb(encoded); } private static long wkbAsLong(DataType dataType, BytesRef wkb) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index fa5334fb33ef7..24e356520ff3d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -150,10 +150,8 @@ private Page randomPage(List columns) { new BytesRef(UnsupportedValueSource.UNSUPPORTED_OUTPUT) ); case "version" -> ((BytesRefBlock.Builder) builder).appendBytesRef(new Version(randomIdentifier()).toBytesRef()); - case "geo_point" -> ((BytesRefBlock.Builder) builder).appendBytesRef(GEO.pointAsWKB(GeometryTestUtils.randomPoint())); - case "cartesian_point" -> ((BytesRefBlock.Builder) builder).appendBytesRef( - CARTESIAN.pointAsWKB(ShapeTestUtils.randomPoint()) - ); + case "geo_point" -> ((BytesRefBlock.Builder) builder).appendBytesRef(GEO.asWkb(GeometryTestUtils.randomPoint())); + case "cartesian_point" -> ((BytesRefBlock.Builder) builder).appendBytesRef(CARTESIAN.asWkb(ShapeTestUtils.randomPoint())); case "null" -> builder.appendNull(); case "_source" -> { try { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 56cbbaf5b010d..c12c4419e40d9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -127,8 +127,8 @@ public static Literal randomLiteral(DataType type) { case "time_duration" -> Duration.ofMillis(randomLongBetween(-604800000L, 604800000L)); // plus/minus 7 days case "text" -> new BytesRef(randomAlphaOfLength(50)); case "version" -> randomVersion().toBytesRef(); - case "geo_point" -> GEO.pointAsWKB(GeometryTestUtils.randomPoint()); - case "cartesian_point" -> CARTESIAN.pointAsWKB(ShapeTestUtils.randomPoint()); + case "geo_point" -> GEO.asWkb(GeometryTestUtils.randomPoint()); + case "cartesian_point" -> CARTESIAN.asWkb(ShapeTestUtils.randomPoint()); case "null" -> null; case "_source" -> { try { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index 15684044a7881..8de007da36113 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -913,18 +913,12 @@ public static List timeDurationCases() { } private static List geoPointCases() { - return List.of( - new TypedDataSupplier("", () -> GEO.pointAsWKB(GeometryTestUtils.randomPoint()), EsqlDataTypes.GEO_POINT) - ); + return List.of(new TypedDataSupplier("", () -> GEO.asWkb(GeometryTestUtils.randomPoint()), EsqlDataTypes.GEO_POINT)); } private static List cartesianPointCases() { return List.of( - new TypedDataSupplier( - "", - () -> CARTESIAN.pointAsWKB(ShapeTestUtils.randomPoint()), - EsqlDataTypes.CARTESIAN_POINT - ) + new 
TypedDataSupplier("", () -> CARTESIAN.asWkb(ShapeTestUtils.randomPoint()), EsqlDataTypes.CARTESIAN_POINT) ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointTests.java index 3144cc4e6940a..88910320c962e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointTests.java @@ -46,7 +46,7 @@ public static Iterable parameters() { EsqlDataTypes.CARTESIAN_POINT, bytesRef -> null, bytesRef -> { - var exception = expectThrows(Exception.class, () -> CARTESIAN.stringAsWKB(bytesRef.utf8ToString())); + var exception = expectThrows(Exception.class, () -> CARTESIAN.wktToWkb(bytesRef.utf8ToString())); return List.of( "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", "Line -1:-1: " + exception @@ -60,12 +60,12 @@ public static Iterable parameters() { List.of( new TestCaseSupplier.TypedDataSupplier( "", - () -> new BytesRef(CARTESIAN.pointAsString(ShapeTestUtils.randomPoint())), + () -> new BytesRef(CARTESIAN.asWkt(ShapeTestUtils.randomPoint())), DataTypes.KEYWORD ) ), EsqlDataTypes.CARTESIAN_POINT, - bytesRef -> CARTESIAN.stringAsWKB(((BytesRef) bytesRef).utf8ToString()), + bytesRef -> CARTESIAN.wktToWkb(((BytesRef) bytesRef).utf8ToString()), List.of() ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointTests.java index 9c1a2b3002ec4..4a5534e1d5d1a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointTests.java @@ -46,7 +46,7 @@ public static Iterable parameters() { EsqlDataTypes.GEO_POINT, bytesRef -> null, bytesRef -> { - var exception = expectThrows(Exception.class, () -> GEO.stringAsWKB(bytesRef.utf8ToString())); + var exception = expectThrows(Exception.class, () -> GEO.wktToWkb(bytesRef.utf8ToString())); return List.of( "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", "Line -1:-1: " + exception @@ -60,12 +60,12 @@ public static Iterable parameters() { List.of( new TestCaseSupplier.TypedDataSupplier( "", - () -> new BytesRef(GEO.pointAsString(GeometryTestUtils.randomPoint())), + () -> new BytesRef(GEO.asWkt(GeometryTestUtils.randomPoint())), DataTypes.KEYWORD ) ), EsqlDataTypes.GEO_POINT, - bytesRef -> GEO.stringAsWKB(((BytesRef) bytesRef).utf8ToString()), + bytesRef -> GEO.wktToWkb(((BytesRef) bytesRef).utf8ToString()), List.of() ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringTests.java index 46721c190c7b6..918956de08648 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringTests.java @@ -91,14 +91,14 @@ public static Iterable parameters() { suppliers, "ToStringFromGeoPointEvaluator[field=" + read + "]", DataTypes.KEYWORD, - wkb -> new BytesRef(GEO.wkbAsString(wkb)), + wkb -> new BytesRef(GEO.wkbToWkt(wkb)), List.of() ); TestCaseSupplier.forUnaryCartesianPoint( suppliers, "ToStringFromCartesianPointEvaluator[field=" + read + "]", DataTypes.KEYWORD, - wkb -> new BytesRef(CARTESIAN.wkbAsString(wkb)), + wkb -> new BytesRef(CARTESIAN.wkbToWkt(wkb)), List.of() ); TestCaseSupplier.forUnaryIp( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java index 6f0a2edafaf04..d2e7e924fb95c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java @@ -469,7 +469,7 @@ protected static void points( BiFunction, Matcher> matcher ) { cases.add(new TestCaseSupplier(name + "(" + dataType.typeName() + ")", List.of(dataType), () -> { - BytesRef wkb = spatial.pointAsWKB(randomPoint.get()); + BytesRef wkb = spatial.asWkb(randomPoint.get()); return new TestCaseSupplier.TestCase( List.of(new TestCaseSupplier.TypedData(List.of(wkb), dataType, "field")), evaluatorName + "[field=Attribute[channel=0]]", @@ -479,7 +479,7 @@ protected static void points( })); for (Block.MvOrdering ordering : Block.MvOrdering.values()) { cases.add(new TestCaseSupplier(name + "(<" + dataType.typeName() + "s>) " + ordering, List.of(dataType), () -> { - List mvData = randomList(1, 100, () -> spatial.pointAsWKB(randomPoint.get())); + List mvData = randomList(1, 100, () -> spatial.asWkb(randomPoint.get())); putInOrder(mvData, ordering); return new TestCaseSupplier.TestCase( List.of(new TestCaseSupplier.TypedData(mvData, dataType, "field")), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java index bbe32350a0465..8403dc3775dce 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java @@ -260,8 +260,8 @@ private static EsqlQueryResponse regularData() { ); BytesRefArray geoPoints = new BytesRefArray(2, BigArrays.NON_RECYCLING_INSTANCE); - geoPoints.append(GEO.pointAsWKB(new Point(12, 56))); - geoPoints.append(GEO.pointAsWKB(new Point(-97, 26))); + geoPoints.append(GEO.asWkb(new Point(12, 56))); + geoPoints.append(GEO.asWkb(new Point(-97, 26))); // values List values = List.of( new Page( @@ -272,8 +272,8 @@ private static EsqlQueryResponse regularData() { blockFactory.newIntArrayVector(new int[] { 11 * 60 + 48, 4 * 60 + 40 }, 2).asBlock(), blockFactory.newBytesRefArrayVector(geoPoints, 2).asBlock(), blockFactory.newBytesRefBlockBuilder(2) - .appendBytesRef(CARTESIAN.pointAsWKB(new Point(1234, 5678))) - .appendBytesRef(CARTESIAN.pointAsWKB(new Point(-9753, 2611))) + .appendBytesRef(CARTESIAN.asWkb(new Point(1234, 5678))) + .appendBytesRef(CARTESIAN.asWkb(new Point(-9753, 2611))) .build() ) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java index b8800713eca89..482ff84e1fd30 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java @@ -47,8 +47,8 @@ public class TextFormatterTests extends ESTestCase { private static final BytesRefArray geoPoints = new BytesRefArray(2, BigArrays.NON_RECYCLING_INSTANCE); static { - geoPoints.append(GEO.pointAsWKB(new Point(12, 56))); - geoPoints.append(GEO.pointAsWKB(new Point(-97, 26))); + geoPoints.append(GEO.asWkb(new Point(12, 56))); + geoPoints.append(GEO.asWkb(new Point(-97, 26))); } EsqlQueryResponse esqlResponse = new EsqlQueryResponse( @@ -72,8 +72,8 @@ public class TextFormatterTests extends ESTestCase { ).asBlock(), blockFactory.newBytesRefArrayVector(geoPoints, 2).asBlock(), blockFactory.newBytesRefBlockBuilder(2) - .appendBytesRef(CARTESIAN.pointAsWKB(new Point(1234, 5678))) - .appendBytesRef(CARTESIAN.pointAsWKB(new Point(-9753, 2611))) + .appendBytesRef(CARTESIAN.asWkb(new Point(1234, 5678))) + .appendBytesRef(CARTESIAN.asWkb(new Point(-9753, 2611))) .build(), blockFactory.newConstantNullBlock(2) ) @@ -146,8 +146,8 @@ public void testFormatWithoutHeader() { ).asBlock(), blockFactory.newBytesRefArrayVector(geoPoints, 2).asBlock(), blockFactory.newBytesRefBlockBuilder(2) - .appendBytesRef(CARTESIAN.pointAsWKB(new Point(1234, 5678))) - .appendBytesRef(CARTESIAN.pointAsWKB(new Point(-9753, 2611))) + .appendBytesRef(CARTESIAN.asWkb(new Point(1234, 5678))) + .appendBytesRef(CARTESIAN.asWkb(new Point(-9753, 2611))) .build(), blockFactory.newConstantNullBlock(2) ) diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypes.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypes.java index 48fb3a34469fb..6508a67f7e785 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypes.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypes.java @@ -62,18 +62,6 @@ public long pointAsLong(double x, double y) { public abstract long pointAsLong(double x, double y); - public String pointAsString(Point point) { - return WellKnownText.toWKT(point); - } - - public BytesRef pointAsWKB(Point point) { - 
return new BytesRef(WellKnownBinary.toWKB(point, ByteOrder.LITTLE_ENDIAN)); - } - - public BytesRef longAsWKB(long encoded) { - return pointAsWKB(longAsPoint(encoded)); - } - public long wkbAsLong(BytesRef wkb) { Geometry geometry = WellKnownBinary.fromWKB(GeometryValidator.NOOP, false, wkb.bytes, wkb.offset, wkb.length); if (geometry instanceof Point point) { @@ -83,18 +71,30 @@ public long wkbAsLong(BytesRef wkb) { } } - public BytesRef stringAsWKB(String string) { + public BytesRef longAsWkb(long encoded) { + return asWkb(longAsPoint(encoded)); + } + + public String asWkt(Geometry geometry) { + return WellKnownText.toWKT(geometry); + } + + public BytesRef asWkb(Geometry geometry) { + return new BytesRef(WellKnownBinary.toWKB(geometry, ByteOrder.LITTLE_ENDIAN)); + } + + public BytesRef wktToWkb(String wkt) { // TODO: we should be able to transform WKT to WKB without building the geometry // we should as well use different validator for cartesian and geo? try { - Geometry geometry = WellKnownText.fromWKT(GeometryValidator.NOOP, false, string); + Geometry geometry = WellKnownText.fromWKT(GeometryValidator.NOOP, false, wkt); return new BytesRef(WellKnownBinary.toWKB(geometry, ByteOrder.LITTLE_ENDIAN)); } catch (Exception e) { throw new IllegalArgumentException("Failed to parse WKT: " + e.getMessage(), e); } } - public String wkbAsString(BytesRef wkb) { + public String wkbToWkt(BytesRef wkb) { return WellKnownText.fromWKB(wkb.bytes, wkb.offset, wkb.length); } } diff --git a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypesTests.java b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypesTests.java index ca650bf29662f..fa53027e43901 100644 --- a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypesTests.java +++ b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypesTests.java @@ -55,7 +55,7 @@ public void testParsing() { for (int i = 0; i < 10; i++) { SpatialCoordinateTypes coordType = type.getKey(); Point point = type.getValue().randomPoint.get(); - assertEquals(coordType.wkbAsString(coordType.pointAsWKB(point)), coordType.pointAsString(point)); + assertEquals(coordType.wkbToWkt(coordType.asWkb(point)), coordType.asWkt(point)); } } } From fdf83a4dd3b0310199f8d35abf030c91790009a1 Mon Sep 17 00:00:00 2001 From: David Roberts Date: Wed, 10 Jan 2024 16:45:49 +0000 Subject: [PATCH 13/75] [ML] Ignore failed jobs in unassigned task logging (#104199) The logging of unassigned tasks added in #100154 should ignore failed jobs. Failed jobs may or may not be assigned to a node, but if they cannot be assigned they will not trigger a scale up by autoscaling, so never will be assigned. Therefore it's wrong to consider the fact that they are unassigned as a problem. (The fact that they are failed may be a problem, but that is monitored elsewhere.) 
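To make the rule described above concrete, here is a minimal, hedged Java sketch of the filtering behaviour (the TaskView record and every name in it are illustrative placeholders, not the real persistent-task classes): a task is only worth reporting as long-unassigned when it has no executor node and is not in the failed state.

import java.util.List;

public class UnassignedTaskReportingSketch {

    // Minimal stand-in for a persistent ML task: a null executor node means "unassigned".
    record TaskView(String mlId, String executorNode, boolean failed) {}

    // Report a task only if it is unassigned AND not failed; a failed job will never be
    // assigned and never triggers autoscaling, so its lack of assignment is not itself a problem.
    static List<String> unassignedTasksWorthReporting(List<TaskView> tasks) {
        return tasks.stream()
            .filter(t -> t.executorNode() == null)
            .filter(t -> t.failed() == false)
            .map(TaskView::mlId)
            .toList();
    }

    public static void main(String[] args) {
        List<TaskView> tasks = List.of(
            new TaskView("job1", null, true),     // failed and unassigned: ignored
            new TaskView("job2", null, false),    // unassigned, not failed: reported
            new TaskView("job3", "node-1", false) // assigned: ignored
        );
        System.out.println(unassignedTasksWorthReporting(tasks)); // prints [job2]
    }
}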
--- .../ml/dataframe/DataFrameAnalyticsTaskState.java | 5 +++++ .../xpack/core/ml/job/config/JobTaskState.java | 5 +++++ .../xpack/core/ml/utils/MlTaskState.java | 5 +++++ .../elasticsearch/xpack/ml/MlAssignmentNotifier.java | 5 +++++ .../xpack/ml/MlAssignmentNotifierTests.java | 12 ++++++++++++ 5 files changed, 32 insertions(+) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsTaskState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsTaskState.java index 8d4b601a38aad..ad2d03a4b8f75 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsTaskState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsTaskState.java @@ -110,6 +110,11 @@ public Instant getLastStateChangeTime() { return lastStateChangeTime; } + @Override + public boolean isFailed() { + return DataFrameAnalyticsState.FAILED.equals(state); + } + public boolean isStatusStale(PersistentTasksCustomMetadata.PersistentTask task) { return allocationId != task.getAllocationId(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobTaskState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobTaskState.java index c07cb0cf9c91a..41fd38ca1398c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobTaskState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobTaskState.java @@ -106,6 +106,11 @@ public Instant getLastStateChangeTime() { return lastStateChangeTime; } + @Override + public boolean isFailed() { + return JobState.FAILED.equals(state); + } + /** * The job state stores the allocation ID at the time it was last set. * This method compares the allocation ID in the state with the allocation diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlTaskState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlTaskState.java index 09a7d3827caf2..7600dac441162 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlTaskState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlTaskState.java @@ -18,4 +18,9 @@ public interface MlTaskState { */ @Nullable Instant getLastStateChangeTime(); + + /** + * @return Is the task in the failed state? 
+ */ + boolean isFailed(); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlAssignmentNotifier.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlAssignmentNotifier.java index 3df836e5f9043..bb7c79ff2ec07 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlAssignmentNotifier.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlAssignmentNotifier.java @@ -24,6 +24,7 @@ import org.elasticsearch.xpack.core.ml.action.StartDataFrameAnalyticsAction; import org.elasticsearch.xpack.core.ml.action.StartDatafeedAction; import org.elasticsearch.xpack.core.ml.utils.MlTaskParams; +import org.elasticsearch.xpack.core.ml.utils.MlTaskState; import org.elasticsearch.xpack.ml.notifications.AnomalyDetectionAuditor; import org.elasticsearch.xpack.ml.notifications.DataFrameAnalyticsAuditor; @@ -298,6 +299,10 @@ synchronized List findLongTimeUnassignedTasks(Instant now, PersistentTas if (task.getExecutorNode() == null) { final String taskName = task.getTaskName(); if (MlTasks.JOB_TASK_NAME.equals(taskName) || MlTasks.DATA_FRAME_ANALYTICS_TASK_NAME.equals(taskName)) { + // Ignore failed tasks - they don't need to be assigned to a node + if (((MlTaskState) task.getState()).isFailed()) { + continue; + } final String mlId = ((MlTaskParams) task.getParams()).getMlId(); final TaskNameAndId key = new TaskNameAndId(taskName, mlId); final UnassignedTimeAndReportTime previousInfo = oldUnassignedInfoByTask.get(key); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlAssignmentNotifierTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlAssignmentNotifierTests.java index a393f691ae004..f0ac79de17cab 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlAssignmentNotifierTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlAssignmentNotifierTests.java @@ -253,6 +253,7 @@ public void testFindLongTimeUnassignedTasks() { Instant eightHoursAgo = now.minus(Duration.ofHours(8)); Instant sevenHoursAgo = eightHoursAgo.plus(Duration.ofHours(1)); Instant twoHoursAgo = sevenHoursAgo.plus(Duration.ofHours(5)); + Instant tomorrow = now.plus(Duration.ofHours(24)); PersistentTasksCustomMetadata.Builder tasksBuilder = PersistentTasksCustomMetadata.builder(); addJobTask("job1", "node1", JobState.OPENED, tasksBuilder); @@ -304,5 +305,16 @@ public void testFindLongTimeUnassignedTasks() { "[xpack/ml/job]/[job3] unassigned for [28800] seconds" ) ); + + tasksBuilder = PersistentTasksCustomMetadata.builder(); + addJobTask("job1", null, JobState.FAILED, tasksBuilder); + addJobTask("job2", null, JobState.FAILED, tasksBuilder); + addJobTask("job3", null, JobState.FAILED, tasksBuilder); + addJobTask("job4", null, JobState.FAILED, tasksBuilder); + addJobTask("job5", "node1", JobState.FAILED, tasksBuilder); + itemsToReport = notifier.findLongTimeUnassignedTasks(tomorrow, tasksBuilder.build()); + // We still have unassigned jobs, but now all the jobs are failed, so none should be reported as unassigned + // as it doesn't make any difference whether they're assigned or not and autoscaling will ignore them + assertThat(itemsToReport, empty()); } } From b5a617989dfdaaaa5a7c2dc56999a5f99f8af262 Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 10 Jan 2024 16:59:44 +0000 Subject: [PATCH 14/75] Reduce dependencies on `SearchTransportService` (#104188) No need to depend on the `SearchTransportService` in these cases, the basic `TransportService` exposes the `RemoteClusterService` already. 
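As a hedged illustration of the dependency narrowing described above (all interfaces below are made-up placeholders standing in for the real TransportService, SearchTransportService and RemoteClusterService), the action can reach remote clusters through the service it already receives instead of requiring the search-specific one:

public class NarrowDependencySketch {

    // Placeholder interfaces; in the real change these are Elasticsearch classes.
    interface RemoteClusterService { boolean isCrossClusterSearchEnabled(); }
    interface TransportService { RemoteClusterService getRemoteClusterService(); }
    interface SearchTransportService { RemoteClusterService getRemoteClusterService(); }

    // Before: the action pulls in the search-specific service only to reach remotes.
    static class RemoteInfoActionBefore {
        final RemoteClusterService remoteClusterService;
        RemoteInfoActionBefore(SearchTransportService searchTransportService) {
            this.remoteClusterService = searchTransportService.getRemoteClusterService();
        }
    }

    // After: the same information comes from the transport service the action already has,
    // so the extra constructor dependency disappears.
    static class RemoteInfoActionAfter {
        final RemoteClusterService remoteClusterService;
        RemoteInfoActionAfter(TransportService transportService) {
            this.remoteClusterService = transportService.getRemoteClusterService();
        }
    }
}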
--- .../admin/cluster/remote/TransportRemoteInfoAction.java | 9 ++------- .../core/termsenum/action/TransportTermsEnumAction.java | 4 +--- 2 files changed, 3 insertions(+), 10 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/TransportRemoteInfoAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/TransportRemoteInfoAction.java index 6e95a9807c4b2..ae7453b7c8cee 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/TransportRemoteInfoAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/TransportRemoteInfoAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.search.SearchTransportService; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.node.DiscoveryNodeRole; @@ -28,13 +27,9 @@ public final class TransportRemoteInfoAction extends HandledTransportAction Date: Wed, 10 Jan 2024 18:06:27 +0000 Subject: [PATCH 15/75] Remove incorrect is-not-empty asserts in EsqlAsyncActionIT (#104217) This commit removes a couple of incorrect is-not-empty asserts in EsqlAsyncActionIT. --- .../org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java | 3 --- 1 file changed, 3 deletions(-) diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java index f13321f03f0fe..689672075fb03 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java @@ -40,7 +40,6 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.is; import static org.hamcrest.core.IsEqual.equalTo; -import static org.hamcrest.core.IsNot.not; /** * Runs test scenarios from EsqlActionIT, with an extra level of indirection @@ -74,8 +73,6 @@ protected EsqlQueryResponse run(String esqlCommands, QueryPragmas pragmas, Query String id = response.asyncExecutionId().get(); if (response.isRunning() == false) { assertThat(request.keepOnCompletion(), is(true)); - assertThat(response.columns(), is(not(empty()))); - assertThat(response.pages(), is(not(empty()))); initialColumns = List.copyOf(response.columns()); initialPages = deepCopyOf(response.pages(), TestBlockFactory.getNonBreakingInstance()); } else { From f33122a191c664622295adf6c38ee37973b81f6e Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Wed, 10 Jan 2024 10:16:51 -0800 Subject: [PATCH 16/75] Disable BWC tests from "monolithic" CI jobs (#104221) --- .buildkite/pipelines/periodic.template.yml | 4 ++-- .buildkite/pipelines/periodic.yml | 4 ++-- .buildkite/scripts/encryption-at-rest.sh | 2 +- .buildkite/scripts/release-tests.sh | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.buildkite/pipelines/periodic.template.yml b/.buildkite/pipelines/periodic.template.yml index 40bc6277379f5..a92e190be7963 100644 --- a/.buildkite/pipelines/periodic.template.yml +++ b/.buildkite/pipelines/periodic.template.yml @@ -2,7 +2,7 @@ steps: - group: bwc steps: $BWC_STEPS - label: concurrent-search-tests - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dtests.jvm.argline=-Des.concurrent_search=true 
-Des.concurrent_search=true check + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dtests.jvm.argline=-Des.concurrent_search=true -Des.concurrent_search=true functionalTests timeout_in_minutes: 420 agents: provider: gcp @@ -97,7 +97,7 @@ steps: diskSizeGb: 350 machineType: custom-32-98304 - label: single-processor-node-tests - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dtests.configure_test_clusters_with_one_processor=true check + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dtests.configure_test_clusters_with_one_processor=true functionalTests timeout_in_minutes: 420 agents: provider: gcp diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 07bc1de57b752..97d174b546e4f 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -1123,7 +1123,7 @@ steps: env: BWC_VERSION: 8.13.0 - label: concurrent-search-tests - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dtests.jvm.argline=-Des.concurrent_search=true -Des.concurrent_search=true check + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dtests.jvm.argline=-Des.concurrent_search=true -Des.concurrent_search=true functionalTests timeout_in_minutes: 420 agents: provider: gcp @@ -1218,7 +1218,7 @@ steps: diskSizeGb: 350 machineType: custom-32-98304 - label: single-processor-node-tests - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dtests.configure_test_clusters_with_one_processor=true check + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dtests.configure_test_clusters_with_one_processor=true functionalTests timeout_in_minutes: 420 agents: provider: gcp diff --git a/.buildkite/scripts/encryption-at-rest.sh b/.buildkite/scripts/encryption-at-rest.sh index 6aa0a3b174fe0..a003107cb00f6 100755 --- a/.buildkite/scripts/encryption-at-rest.sh +++ b/.buildkite/scripts/encryption-at-rest.sh @@ -22,4 +22,4 @@ touch .output.log rm -Rf "$WORKSPACE" ln -s "$PWD" "$WORKSPACE" -.ci/scripts/run-gradle.sh -Dbwc.checkout.align=true check \ No newline at end of file +.ci/scripts/run-gradle.sh -Dbwc.checkout.align=true functionalTests diff --git a/.buildkite/scripts/release-tests.sh b/.buildkite/scripts/release-tests.sh index aa5c50d2e87c1..e4185c642f244 100755 --- a/.buildkite/scripts/release-tests.sh +++ b/.buildkite/scripts/release-tests.sh @@ -20,4 +20,4 @@ curl --fail -o "${ML_IVY_REPO}/maven/org/elasticsearch/ml/ml-cpp/${ES_VERSION}/m curl --fail -o "${ML_IVY_REPO}/maven/org/elasticsearch/ml/ml-cpp/${ES_VERSION}/ml-cpp-${ES_VERSION}.zip" https://artifacts-snapshot.elastic.co/ml-cpp/${ES_VERSION}-SNAPSHOT/downloads/ml-cpp/ml-cpp-${ES_VERSION}-SNAPSHOT.zip .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dbuild.snapshot=false -Dbuild.ml_cpp.repo=file://${ML_IVY_REPO} \ - -Dtests.jvm.argline=-Dbuild.snapshot=false -Dlicense.key=${WORKSPACE}/x-pack/license-tools/src/test/resources/public.key -Dbuild.id=deadbeef build \ No newline at end of file + -Dtests.jvm.argline=-Dbuild.snapshot=false -Dlicense.key=${WORKSPACE}/x-pack/license-tools/src/test/resources/public.key -Dbuild.id=deadbeef assemble functionalTests From f6f86d11d3553f2b37e4a1e0a79a4c2953a2dd67 Mon Sep 17 00:00:00 2001 From: Volodymyr Krasnikov <129072588+volodk85@users.noreply.github.com> Date: Wed, 10 Jan 2024 11:03:52 -0800 Subject: [PATCH 17/75] Calc follower vs leader indexing lag based on shard global checkpoints (#104015) * Calc follower vs leader indexing lag baed on shard global checkpoints * code simplify * 
follow PR comments * spotless * Update docs/reference/ccr/apis/follow/get-follow-stats.asciidoc Co-authored-by: Iraklis Psaroudakis --------- Co-authored-by: Iraklis Psaroudakis --- .../ccr/apis/follow/get-follow-stats.asciidoc | 5 +++++ docs/reference/ccr/apis/get-ccr-stats.asciidoc | 2 ++ .../rest-api-spec/test/ccr/follow_stats.yml | 1 + .../xpack/core/ccr/action/FollowStatsAction.java | 13 ++++++++++++- 4 files changed, 20 insertions(+), 1 deletion(-) diff --git a/docs/reference/ccr/apis/follow/get-follow-stats.asciidoc b/docs/reference/ccr/apis/follow/get-follow-stats.asciidoc index 99488afeb98ee..c910b0431a6ea 100644 --- a/docs/reference/ccr/apis/follow/get-follow-stats.asciidoc +++ b/docs/reference/ccr/apis/follow/get-follow-stats.asciidoc @@ -75,6 +75,9 @@ task. In this situation, the following task must be resumed manually with the `index`:: (string) The name of the follower index. +`total_global_checkpoint_lag`:: +(long) Indication of how much the follower is lagging the leader. This is the sum of the difference between the `leader_global_checkpoint` and the `follower_global_checkpoint` for all shards. + //Begin shards `shards`:: (array) An array of shard-level following task statistics. @@ -219,6 +222,7 @@ The API returns the following results: "indices" : [ { "index" : "follower_index", + "total_global_checkpoint_lag" : 256, "shards" : [ { "remote_cluster" : "remote_cluster", @@ -255,6 +259,7 @@ The API returns the following results: ] } -------------------------------------------------- +// TESTRESPONSE[s/"total_global_checkpoint_lag" : 256/"total_global_checkpoint_lag" : 0/] // TESTRESPONSE[s/"leader_global_checkpoint" : 1024/"leader_global_checkpoint" : $body.indices.0.shards.0.leader_global_checkpoint/] // TESTRESPONSE[s/"leader_max_seq_no" : 1536/"leader_max_seq_no" : $body.indices.0.shards.0.leader_max_seq_no/] // TESTRESPONSE[s/"follower_global_checkpoint" : 768/"follower_global_checkpoint" : $body.indices.0.shards.0.follower_global_checkpoint/] diff --git a/docs/reference/ccr/apis/get-ccr-stats.asciidoc b/docs/reference/ccr/apis/get-ccr-stats.asciidoc index 6d43e089c2471..02f5cf886049d 100644 --- a/docs/reference/ccr/apis/get-ccr-stats.asciidoc +++ b/docs/reference/ccr/apis/get-ccr-stats.asciidoc @@ -112,6 +112,7 @@ The API returns the following results: "indices" : [ { "index" : "follower_index", + "total_global_checkpoint_lag" : 256, "shards" : [ { "remote_cluster" : "remote_cluster", @@ -149,6 +150,7 @@ The API returns the following results: } } -------------------------------------------------- +// TESTRESPONSE[s/"total_global_checkpoint_lag" : 256/"total_global_checkpoint_lag" : 0/] // TESTRESPONSE[s/"number_of_failed_follow_indices" : 0/"number_of_failed_follow_indices" : $body.auto_follow_stats.number_of_failed_follow_indices/] // TESTRESPONSE[s/"number_of_failed_remote_cluster_state_requests" : 0/"number_of_failed_remote_cluster_state_requests" : $body.auto_follow_stats.number_of_failed_remote_cluster_state_requests/] // TESTRESPONSE[s/"number_of_successful_follow_indices" : 1/"number_of_successful_follow_indices" : $body.auto_follow_stats.number_of_successful_follow_indices/] diff --git a/x-pack/plugin/ccr/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/ccr/follow_stats.yml b/x-pack/plugin/ccr/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/ccr/follow_stats.yml index df74a608dbe84..fb37e9a05c5cf 100644 --- a/x-pack/plugin/ccr/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/ccr/follow_stats.yml +++ 
b/x-pack/plugin/ccr/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/ccr/follow_stats.yml @@ -54,6 +54,7 @@ ccr.follow_stats: index: bar - match: { indices.0.index: "bar" } + - match: { indices.0.total_global_checkpoint_lag: 0 } - match: { indices.0.shards.0.leader_index: "foo" } - match: { indices.0.shards.0.follower_index: "bar" } - match: { indices.0.shards.0.shard_id: 0 } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowStatsAction.java index 726257910c9a5..1fdee42e7e18e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowStatsAction.java @@ -92,7 +92,10 @@ private static Iterator innerToXContentChunked(Map Iterators.concat( Iterators.single( - (builder, params) -> builder.startObject().field("index", indexEntry.getKey()).startArray("shards") + (builder, params) -> builder.startObject() + .field("index", indexEntry.getKey()) + .field("total_global_checkpoint_lag", calcFollowerToLeaderLaggingOps(indexEntry.getValue())) + .startArray("shards") ), indexEntry.getValue().values().iterator(), Iterators.single((builder, params) -> builder.endArray().endObject()) @@ -102,6 +105,14 @@ private static Iterator innerToXContentChunked(Map followShardTaskStats) { + return followShardTaskStats.values() + .stream() + .map(StatsResponse::status) + .mapToLong(s -> s.leaderGlobalCheckpoint() - s.followerGlobalCheckpoint()) + .sum(); + } + @Override public boolean equals(Object o) { if (this == o) return true; From 5a71fbe37014465dfe39f44b5a54c7f8ac1a2c28 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Wed, 10 Jan 2024 14:17:35 -0500 Subject: [PATCH 18/75] Correct profiled rewrite time for knn with a pre-filter (#104150) kNN profiling will fire multiple rewrite actions in a row without stopping a previous one. Right now, this means in production, the rewrite times are inaccurate for knn with a prefilter as we inaccurately calculate the total time (prefilter rewrite & knn rewrite). This adjusts the interface for rewriting timing to allow concurrent rewriters which then each can update the total rewrite time that we return to the user. 
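A minimal, hedged sketch of the accumulation pattern described above (names are illustrative, not the actual profiler API): each rewrite carries its own timer, so overlapping rewrites, such as a kNN rewrite that triggers a pre-filter rewrite before it finishes, each add their own elapsed time to a shared atomic total instead of clobbering a single scratch field.

import java.util.concurrent.atomic.AtomicLong;

public class ConcurrentRewriteTimingSketch {

    private final AtomicLong totalRewriteNanos = new AtomicLong();

    // Every caller gets an independent start timestamp instead of sharing one scratch slot.
    long startRewrite() {
        return System.nanoTime();
    }

    // Add this rewrite's elapsed time (at least 1ns) to the running total and return the total.
    long stopAndAddRewrite(long startNanos) {
        long elapsed = Math.max(1, System.nanoTime() - startNanos);
        return totalRewriteNanos.addAndGet(elapsed);
    }

    public static void main(String[] args) throws InterruptedException {
        ConcurrentRewriteTimingSketch profiler = new ConcurrentRewriteTimingSketch();
        long knn = profiler.startRewrite();        // outer rewrite still running...
        long preFilter = profiler.startRewrite();  // ...when the pre-filter rewrite starts
        Thread.sleep(5);
        profiler.stopAndAddRewrite(preFilter);
        profiler.stopAndAddRewrite(knn);
        System.out.println("total rewrite nanos: " + profiler.totalRewriteNanos.get());
    }
}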
Relates to: https://github.com/elastic/elasticsearch/issues/104131 --- docs/changelog/104150.yaml | 5 ++++ .../search/profile/dfs/DfsProfilerIT.java | 24 ++++++++-------- .../search/internal/ContextIndexSearcher.java | 5 ++-- .../query/InternalQueryProfileTree.java | 28 +++++++++++-------- .../search/profile/query/QueryProfiler.java | 13 +++++---- 5 files changed, 42 insertions(+), 33 deletions(-) create mode 100644 docs/changelog/104150.yaml diff --git a/docs/changelog/104150.yaml b/docs/changelog/104150.yaml new file mode 100644 index 0000000000000..c910542dcf7f6 --- /dev/null +++ b/docs/changelog/104150.yaml @@ -0,0 +1,5 @@ +pr: 104150 +summary: Correct profiled rewrite time for knn with a pre-filter +area: Search +type: bug +issues: [] diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/dfs/DfsProfilerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/dfs/DfsProfilerIT.java index c6d3a6733d2fc..7e504a100ba56 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/dfs/DfsProfilerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/dfs/DfsProfilerIT.java @@ -65,25 +65,23 @@ public void testProfileDfs() throws Exception { int iters = between(5, 10); for (int i = 0; i < iters; i++) { QueryBuilder q = randomQueryBuilder(List.of(textField), List.of(numericField), numDocs, 3); + KnnSearchBuilder knnSearchBuilder = new KnnSearchBuilder( + vectorField, + new float[] { randomFloat(), randomFloat(), randomFloat() }, + randomIntBetween(5, 10), + 50, + randomBoolean() ? null : randomFloat() + ); + if (randomBoolean()) { + knnSearchBuilder.addFilterQuery(q); + } logger.info("Query: {}", q); assertResponse( prepareSearch().setQuery(q) .setTrackTotalHits(true) .setProfile(true) .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) - .setKnnSearch( - randomList( - 2, - 5, - () -> new KnnSearchBuilder( - vectorField, - new float[] { randomFloat(), randomFloat(), randomFloat() }, - randomIntBetween(5, 10), - 50, - randomBoolean() ? 
null : randomFloat() - ) - ) - ), + .setKnnSearch(randomList(2, 5, () -> knnSearchBuilder)), response -> { assertNotNull("Profile response element should not be null", response.getProfileResults()); assertThat("Profile response should not be an empty array", response.getProfileResults().size(), not(0)); diff --git a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java index d6a3334dd035b..d834c12d0abe1 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java @@ -194,8 +194,9 @@ public void setAggregatedDfs(AggregatedDfs aggregatedDfs) { @Override public Query rewrite(Query original) throws IOException { + Timer rewriteTimer = null; if (profiler != null) { - profiler.startRewriteTime(); + rewriteTimer = profiler.startRewriteTime(); } try { return super.rewrite(original); @@ -204,7 +205,7 @@ public Query rewrite(Query original) throws IOException { return new MatchNoDocsQuery("rewrite timed out"); } finally { if (profiler != null) { - profiler.stopAndAddRewriteTime(); + profiler.stopAndAddRewriteTime(rewriteTimer); } } } diff --git a/server/src/main/java/org/elasticsearch/search/profile/query/InternalQueryProfileTree.java b/server/src/main/java/org/elasticsearch/search/profile/query/InternalQueryProfileTree.java index 686e628d7faef..14ff1aaa8a34f 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/query/InternalQueryProfileTree.java +++ b/server/src/main/java/org/elasticsearch/search/profile/query/InternalQueryProfileTree.java @@ -11,6 +11,9 @@ import org.apache.lucene.search.Query; import org.elasticsearch.search.profile.AbstractInternalProfileTree; import org.elasticsearch.search.profile.ProfileResult; +import org.elasticsearch.search.profile.Timer; + +import java.util.concurrent.atomic.AtomicLong; /** * This class tracks the dependency tree for queries (scoring and rewriting) and @@ -20,8 +23,7 @@ final class InternalQueryProfileTree extends AbstractInternalProfileTree { /** Rewrite time */ - private long rewriteTime; - private long rewriteScratch; + private final AtomicLong rewriteTime = new AtomicLong(0L); @Override protected QueryProfileBreakdown createProfileBreakdown() { @@ -44,11 +46,12 @@ protected String getDescriptionFromElement(Query query) { } /** - * Begin timing a query for a specific Timing context + * Begin timing a query for a specific Timing context and return the running timer */ - public void startRewriteTime() { - assert rewriteScratch == 0; - rewriteScratch = System.nanoTime(); + public Timer startRewriteTime() { + Timer timer = new Timer(); + timer.start(); + return timer; } /** @@ -59,14 +62,15 @@ public void startRewriteTime() { * * @return The elapsed time */ - public long stopAndAddRewriteTime() { - long time = Math.max(1, System.nanoTime() - rewriteScratch); - rewriteTime += time; - rewriteScratch = 0; - return time; + public long stopAndAddRewriteTime(Timer timer) { + timer.stop(); + assert timer.getCount() == 1L : "stopAndAddRewriteTime() called without a matching startRewriteTime()"; + long time = Math.max(1, timer.getApproximateTiming()); + return rewriteTime.addAndGet(time); } public long getRewriteTime() { - return rewriteTime; + return rewriteTime.get(); } + } diff --git a/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfiler.java 
b/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfiler.java index a40b1284238b2..e1933f4552485 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfiler.java +++ b/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfiler.java @@ -10,8 +10,9 @@ import org.apache.lucene.search.Query; import org.elasticsearch.search.profile.AbstractProfiler; +import org.elasticsearch.search.profile.Timer; -import java.util.Objects; +import static java.util.Objects.requireNonNull; /** * This class acts as a thread-local storage for profiling a query. It also @@ -50,15 +51,15 @@ public void setCollectorResult(CollectorResult collectorResult) { if (this.collectorResult != null) { throw new IllegalStateException("The collector result can only be set once."); } - this.collectorResult = Objects.requireNonNull(collectorResult); + this.collectorResult = requireNonNull(collectorResult); } /** * Begin timing the rewrite phase of a request. All rewrites are accumulated together into a * single metric */ - public void startRewriteTime() { - ((InternalQueryProfileTree) profileTree).startRewriteTime(); + public Timer startRewriteTime() { + return ((InternalQueryProfileTree) profileTree).startRewriteTime(); } /** @@ -67,8 +68,8 @@ public void startRewriteTime() { * * @return cumulative rewrite time */ - public long stopAndAddRewriteTime() { - return ((InternalQueryProfileTree) profileTree).stopAndAddRewriteTime(); + public long stopAndAddRewriteTime(Timer rewriteTimer) { + return ((InternalQueryProfileTree) profileTree).stopAndAddRewriteTime(requireNonNull(rewriteTimer)); } /** From c14e300471cf2d6cac97a5e245287ba2110ba131 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Wed, 10 Jan 2024 12:22:02 -0800 Subject: [PATCH 19/75] Skip heap attack tests in release builds (#104228) We need to add a test plugin in the heap attack tests. However, we are not loading test plugins in the release builds; hence, we need to skip this suite. --- test/external-modules/esql-heap-attack/build.gradle | 3 +++ 1 file changed, 3 insertions(+) diff --git a/test/external-modules/esql-heap-attack/build.gradle b/test/external-modules/esql-heap-attack/build.gradle index b9cead7b2318a..9f1cdfac61aa1 100644 --- a/test/external-modules/esql-heap-attack/build.gradle +++ b/test/external-modules/esql-heap-attack/build.gradle @@ -5,11 +5,13 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ +import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-java-rest-test' // Necessary to use tests in Serverless apply plugin: 'elasticsearch.internal-test-artifact' + esplugin { description 'A test module that can trigger out of memory' classname 'org.elasticsearch.test.esql.heap_attack.HeapAttackPlugin' @@ -17,4 +19,5 @@ esplugin { tasks.named('javaRestTest') { usesDefaultDistribution() + it.onlyIf("snapshot build") { BuildParams.isSnapshotBuild() } } From c2426ce8865780bc4610c34a6ae77680604c7aca Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 10 Jan 2024 15:58:12 -0500 Subject: [PATCH 20/75] ESQL: Run yaml tests async (#104205) This adds tests that run the our suite of yaml tests against the ESQL async endpoint. That's quite nice because the yaml tests are where we handle lots of fun error cases and this'll make sure async does sensible things in those cases. 
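For orientation, here is a hedged Java sketch of the request shape the new async endpoints expect, using the low-level REST client (the index name, query string and id extraction are placeholders and error handling is omitted): submit with a short wait to get an async id back, then fetch the result by id with a long wait, which is essentially what the rewritten yaml do-sections do.

import org.apache.http.HttpHost;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class EsqlAsyncQuerySketch {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            // Submit the query; with a 0ms wait the response normally just carries an async id.
            Request submit = new Request("POST", "/_query/async");
            submit.addParameter("wait_for_completion_timeout", "0ms");
            submit.setJsonEntity("{\"query\": \"FROM sample_index | LIMIT 10\"}");
            Response submitted = client.performRequest(submit);

            // Crude id extraction for the sketch; real code would use a JSON parser.
            String body = EntityUtils.toString(submitted.getEntity());
            Matcher id = Pattern.compile("\"id\"\\s*:\\s*\"([^\"]+)\"").matcher(body);
            if (id.find()) {
                // Fetch the result, blocking up to 30 minutes, as the async yaml wrapper does.
                Request fetch = new Request("GET", "/_query/async/" + id.group(1));
                fetch.addParameter("wait_for_completion_timeout", "30m");
                Response result = client.performRequest(fetch);
                System.out.println(EntityUtils.toString(result.getEntity()));
            } else {
                System.out.println(body); // query already finished within the 0ms wait
            }
        }
    }
}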
--- .../rest-api-spec/api/esql.async_query.json | 39 +++++++++ .../api/esql.async_query_get.json | 39 +++++++++ .../rest-api-spec/api/esql.query.json | 2 +- .../rest/yaml/section/ApiCallSection.java | 13 +++ .../test/rest/yaml/section/DoSection.java | 18 ++-- .../single_node/AbstractEsqlClientYamlIT.java | 47 +++++++++++ .../qa/single_node/EsqlClientYamlAsyncIT.java | 84 +++++++++++++++++++ .../EsqlClientYamlAsyncSubmitAndFetchIT.java | 59 +++++++++++++ .../esql/qa/single_node/EsqlClientYamlIT.java | 31 +------ 9 files changed, 295 insertions(+), 37 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/esql.async_query.json create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/esql.async_query_get.json create mode 100644 x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/AbstractEsqlClientYamlIT.java create mode 100644 x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncIT.java create mode 100644 x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/esql.async_query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/esql.async_query.json new file mode 100644 index 0000000000000..a2bcf67e8611c --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/esql.async_query.json @@ -0,0 +1,39 @@ +{ + "esql.async_query":{ + "documentation":{ + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/esql-async-query-api.html", + "description":"Executes an ESQL request asynchronously" + }, + "stability":"experimental", + "visibility":"public", + "headers":{ + "accept": ["application/json"], + "content_type": ["application/json"] + }, + "url":{ + "paths":[ + { + "path":"/_query/async", + "methods":[ + "POST" + ] + } + ] + }, + "params":{ + "format":{ + "type":"string", + "description":"a short version of the Accept header, e.g. json, yaml" + }, + "delimiter":{ + "type":"string", + "description":"The character to use between values within a CSV row. Only valid for the csv format.", + "default":false + } + }, + "body":{ + "description":"Use the `query` element to start a query. Use `columnar` to format the answer.", + "required":true + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/esql.async_query_get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/esql.async_query_get.json new file mode 100644 index 0000000000000..bf38522cfb448 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/esql.async_query_get.json @@ -0,0 +1,39 @@ +{ + "esql.async_query_get":{ + "documentation":{ + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/esql-async-query-get-api.html", + "description": "Retrieves the results of a previously submitted async query request given its ID." 
+ }, + "stability":"stable", + "visibility":"public", + "headers":{ + "accept": [ "application/json"] + }, + "url":{ + "paths":[ + { + "path":"/_query/async/{id}", + "methods":[ + "GET" + ], + "parts":{ + "id":{ + "type":"string", + "description":"The async query ID" + } + } + } + ] + }, + "params":{ + "wait_for_completion_timeout":{ + "type":"time", + "description":"Specify the time that the request should block waiting for the final response" + }, + "keep_alive": { + "type": "time", + "description": "Specify the time interval in which the results (partial or final) for this search will be available" + } + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/esql.query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/esql.query.json index c038ac4f3b749..8810746851468 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/esql.query.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/esql.query.json @@ -32,7 +32,7 @@ } }, "body":{ - "description":"Use the `query` element to start a query. Use `time_zone` to specify an execution time zone and `columnar` to format the answer.", + "description":"Use the `query` element to start a query. Use `columnar` to format the answer.", "required":true } } diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/ApiCallSection.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/ApiCallSection.java index e152f626b8541..1708c5977486d 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/ApiCallSection.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/ApiCallSection.java @@ -32,6 +32,19 @@ public ApiCallSection(String api) { this.api = api; } + public ApiCallSection copyWithNewApi(String api) { + ApiCallSection copy = new ApiCallSection(api); + for (var e : params.entrySet()) { + copy.addParam(e.getKey(), e.getValue()); + } + copy.addHeaders(headers); + for (var b : bodies) { + copy.addBody(b); + } + copy.nodeSelector = nodeSelector; + return copy; + } + public String getApi() { return api; } diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java index b1a9142596d67..a23a433f812c2 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java @@ -266,7 +266,7 @@ public ApiCallSection getApiCallSection() { return apiCallSection; } - void setApiCallSection(ApiCallSection apiCallSection) { + public void setApiCallSection(ApiCallSection apiCallSection) { this.apiCallSection = apiCallSection; } @@ -275,7 +275,7 @@ void setApiCallSection(ApiCallSection apiCallSection) { * If the headers don't match exactly this request is considered to have failed. * Defaults to emptyList. */ - List getExpectedWarningHeaders() { + public List getExpectedWarningHeaders() { return expectedWarningHeaders; } @@ -284,7 +284,7 @@ List getExpectedWarningHeaders() { * If the headers don't match this request is considered to have failed. * Defaults to emptyList. */ - List getExpectedWarningHeadersRegex() { + public List getExpectedWarningHeadersRegex() { return expectedWarningHeadersRegex; } @@ -292,7 +292,7 @@ List getExpectedWarningHeadersRegex() { * Set the warning headers that we expect from this response. 
If the headers don't match exactly this request is considered to have * failed. Defaults to emptyList. */ - void setExpectedWarningHeaders(List expectedWarningHeaders) { + public void setExpectedWarningHeaders(List expectedWarningHeaders) { this.expectedWarningHeaders = expectedWarningHeaders; } @@ -300,7 +300,7 @@ void setExpectedWarningHeaders(List expectedWarningHeaders) { * Set the warning headers patterns that we expect from this response. If the headers don't match this request is considered to have * failed. Defaults to emptyList. */ - void setExpectedWarningHeadersRegex(List expectedWarningHeadersRegex) { + public void setExpectedWarningHeadersRegex(List expectedWarningHeadersRegex) { this.expectedWarningHeadersRegex = expectedWarningHeadersRegex; } @@ -308,7 +308,7 @@ void setExpectedWarningHeadersRegex(List expectedWarningHeadersRegex) { * Warning headers that we allow from this response. These warning * headers don't cause the test to fail. Defaults to emptyList. */ - List getAllowedWarningHeaders() { + public List getAllowedWarningHeaders() { return allowedWarningHeaders; } @@ -316,7 +316,7 @@ List getAllowedWarningHeaders() { * Warning headers that we allow from this response. These warning * headers don't cause the test to fail. Defaults to emptyList. */ - List getAllowedWarningHeadersRegex() { + public List getAllowedWarningHeadersRegex() { return allowedWarningHeadersRegex; } @@ -324,7 +324,7 @@ List getAllowedWarningHeadersRegex() { * Set the warning headers that we expect from this response. These * warning headers don't cause the test to fail. Defaults to emptyList. */ - void setAllowedWarningHeaders(List allowedWarningHeaders) { + public void setAllowedWarningHeaders(List allowedWarningHeaders) { this.allowedWarningHeaders = allowedWarningHeaders; } @@ -332,7 +332,7 @@ void setAllowedWarningHeaders(List allowedWarningHeaders) { * Set the warning headers pattern that we expect from this response. These * warning headers don't cause the test to fail. Defaults to emptyList. */ - void setAllowedWarningHeadersRegex(List allowedWarningHeadersRegex) { + public void setAllowedWarningHeadersRegex(List allowedWarningHeadersRegex) { this.allowedWarningHeadersRegex = allowedWarningHeadersRegex; } diff --git a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/AbstractEsqlClientYamlIT.java b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/AbstractEsqlClientYamlIT.java new file mode 100644 index 0000000000000..70afdf32d3808 --- /dev/null +++ b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/AbstractEsqlClientYamlIT.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.qa.single_node; + +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; + +abstract class AbstractEsqlClientYamlIT extends ESClientYamlSuiteTestCase { + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .setting("xpack.security.enabled", "false") + .setting("xpack.license.self_generated.type", "trial") + .build(); + + @Override + protected final String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + protected AbstractEsqlClientYamlIT(final ClientYamlTestCandidate testCandidate) { + super(testCandidate); + } + + @Before + @After + private void assertRequestBreakerEmpty() throws Exception { + /* + * This hook is shared by all subclasses. If it is public it we'll + * get complaints that it is inherited. It isn't. Whatever. Making + * it private works - the hook still runs. It just looks strange. + */ + EsqlSpecTestCase.assertRequestBreakerEmpty(); + } +} diff --git a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncIT.java b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncIT.java new file mode 100644 index 0000000000000..a38e34d7842d8 --- /dev/null +++ b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncIT.java @@ -0,0 +1,84 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.qa.single_node; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.section.ApiCallSection; +import org.elasticsearch.test.rest.yaml.section.ClientYamlTestSection; +import org.elasticsearch.test.rest.yaml.section.DoSection; +import org.elasticsearch.test.rest.yaml.section.ExecutableSection; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Stream; + +/** + * Run the ESQL yaml tests against the async esql endpoint with a 30 minute {@code wait_until_completion_timeout}. + * That's far longer than any should take and far longer than any sensible person will wait, but it's simple + * and it makes sure all the yaml tests work when within the timeout. 
+ */ +public class EsqlClientYamlAsyncIT extends AbstractEsqlClientYamlIT { + public EsqlClientYamlAsyncIT(final ClientYamlTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable parameters() throws Exception { + return parameters(doSection -> { + ApiCallSection copy = doSection.getApiCallSection().copyWithNewApi("esql.async_query"); + for (Map body : copy.getBodies()) { + body.put("wait_for_completion_timeout", "30m"); + } + doSection.setApiCallSection(copy); + return Stream.of(doSection); + }); + } + + public static Iterable parameters(Function> modify) throws Exception { + List result = new ArrayList<>(); + for (Object[] orig : ESClientYamlSuiteTestCase.createParameters()) { + assert orig.length == 1; + ClientYamlTestCandidate candidate = (ClientYamlTestCandidate) orig[0]; + try { + ClientYamlTestSection modified = new ClientYamlTestSection( + candidate.getTestSection().getLocation(), + candidate.getTestSection().getName(), + candidate.getTestSection().getSkipSection(), + candidate.getTestSection().getExecutableSections().stream().flatMap(e -> modifyExecutableSection(e, modify)).toList() + ); + result.add(new Object[] { new ClientYamlTestCandidate(candidate.getRestTestSuite(), modified) }); + } catch (IllegalArgumentException e) { + throw new IllegalArgumentException("error modifying " + candidate + ": " + e.getMessage(), e); + } + } + return result; + } + + private static Stream modifyExecutableSection( + ExecutableSection e, + Function> modify + ) { + if (false == (e instanceof DoSection)) { + return Stream.of(e); + } + DoSection doSection = (DoSection) e; + String api = doSection.getApiCallSection().getApi(); + return switch (api) { + case "esql.query" -> modify.apply(doSection); + case "esql.async_query", "esql.async_query_get" -> throw new IllegalArgumentException( + "The esql yaml tests can't contain async_query or async_query_get because we modify them on the fly and *add* those." + ); + default -> Stream.of(e); + }; + } +} diff --git a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java new file mode 100644 index 0000000000000..91f9540008eaa --- /dev/null +++ b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java @@ -0,0 +1,59 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.qa.single_node; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.section.ApiCallSection; +import org.elasticsearch.test.rest.yaml.section.DoSection; + +import java.util.List; +import java.util.Map; +import java.util.stream.Stream; + +/** + * Run the ESQL yaml tests async and then fetch the results with a long wait time. 
+ */ +public class EsqlClientYamlAsyncSubmitAndFetchIT extends AbstractEsqlClientYamlIT { + public EsqlClientYamlAsyncSubmitAndFetchIT(final ClientYamlTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable parameters() throws Exception { + return EsqlClientYamlAsyncIT.parameters(doSection -> { + ApiCallSection copy = doSection.getApiCallSection().copyWithNewApi("esql.async_query"); + for (Map body : copy.getBodies()) { + body.put("wait_for_completion_timeout", "0ms"); + } + doSection.setApiCallSection(copy); + + DoSection fetch = new DoSection(doSection.getLocation()); + fetch.setApiCallSection(new ApiCallSection("esql.async_query_get")); + fetch.getApiCallSection().addParam("wait_for_completion_timeout", "30m"); + fetch.getApiCallSection().addParam("id", "$body.id"); + + /* + * The request to start the query doesn't make warnings or errors so shift + * those to the fetch. + */ + fetch.setExpectedWarningHeaders(doSection.getExpectedWarningHeaders()); + fetch.setExpectedWarningHeadersRegex(doSection.getExpectedWarningHeadersRegex()); + fetch.setAllowedWarningHeaders(doSection.getAllowedWarningHeaders()); + fetch.setAllowedWarningHeadersRegex(doSection.getAllowedWarningHeadersRegex()); + fetch.setCatch(doSection.getCatch()); + doSection.setExpectedWarningHeaders(List.of()); + doSection.setExpectedWarningHeadersRegex(List.of()); + doSection.setAllowedWarningHeaders(List.of()); + doSection.setAllowedWarningHeadersRegex(List.of()); + doSection.setCatch(null); + return Stream.of(doSection, fetch); + }); + } +} diff --git a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlIT.java b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlIT.java index 5af469c018345..e67ca751298be 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlIT.java @@ -9,29 +9,12 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; -import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; - -public class EsqlClientYamlIT extends ESClientYamlSuiteTestCase { - - @ClassRule - public static ElasticsearchCluster cluster = ElasticsearchCluster.local() - .distribution(DistributionType.DEFAULT) - .setting("xpack.security.enabled", "false") - .setting("xpack.license.self_generated.type", "trial") - .build(); - - @Override - protected String getTestRestCluster() { - return cluster.getHttpAddresses(); - } +/** + * Run the ESQL yaml tests against the synchronous API. 
+ */ +public class EsqlClientYamlIT extends AbstractEsqlClientYamlIT { public EsqlClientYamlIT(final ClientYamlTestCandidate testCandidate) { super(testCandidate); } @@ -40,10 +23,4 @@ public EsqlClientYamlIT(final ClientYamlTestCandidate testCandidate) { public static Iterable parameters() throws Exception { return createParameters(); } - - @Before - @After - public void assertRequestBreakerEmpty() throws Exception { - EsqlSpecTestCase.assertRequestBreakerEmpty(); - } } From 9f6e5a4ccf5728f78c7ad532c15d2e6a131f0b10 Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Wed, 10 Jan 2024 22:27:13 +0100 Subject: [PATCH 21/75] ESQL: Add Warning tests for all remaining functions (#103739) This adds unit tests cases for all the functions that were missing tests checking on the correct generation of the Warning headers in case the execution raised an Exception that lead to a `null` result. --- .../esql/functions/types/mul.asciidoc | 1 + .../esql/functions/types/mv_sum.asciidoc | 3 ++ .../esql/functions/types/sub.asciidoc | 1 + .../function/scalar/date/DateExtract.java | 2 + .../function/AbstractFunctionTestCase.java | 34 ++++++++----- .../expression/function/TestCaseSupplier.java | 17 +++++++ .../scalar/date/DateExtractTests.java | 43 +++++++++++++---- .../function/scalar/date/DateParseTests.java | 39 +++++++++++++++ .../scalar/multivalue/MvSumTests.java | 48 +++++++++++++++++++ .../function/scalar/string/ReplaceTests.java | 36 +++++++------- .../AbstractArithmeticTestCase.java | 25 ++++++++++ .../operator/arithmetic/AddTests.java | 41 ++++++++++++++++ .../operator/arithmetic/DivTests.java | 25 ++++++++++ .../operator/arithmetic/ModTests.java | 25 ++++++++++ .../operator/arithmetic/MulTests.java | 46 ++++++++++++------ .../operator/arithmetic/SubTests.java | 35 +++++++++++++- 16 files changed, 365 insertions(+), 56 deletions(-) diff --git a/docs/reference/esql/functions/types/mul.asciidoc b/docs/reference/esql/functions/types/mul.asciidoc index eee2d68e4653f..2f5100b1d1494 100644 --- a/docs/reference/esql/functions/types/mul.asciidoc +++ b/docs/reference/esql/functions/types/mul.asciidoc @@ -4,4 +4,5 @@ lhs | rhs | result double | double | double integer | integer | integer long | long | long +unsigned_long | unsigned_long | unsigned_long |=== diff --git a/docs/reference/esql/functions/types/mv_sum.asciidoc b/docs/reference/esql/functions/types/mv_sum.asciidoc index 09cb78511d275..f1831429aa95c 100644 --- a/docs/reference/esql/functions/types/mv_sum.asciidoc +++ b/docs/reference/esql/functions/types/mv_sum.asciidoc @@ -2,4 +2,7 @@ |=== arg1 | result double | double +integer | integer +long | long +unsigned_long | unsigned_long |=== diff --git a/docs/reference/esql/functions/types/sub.asciidoc b/docs/reference/esql/functions/types/sub.asciidoc index ed26adf06ecde..826c4f6274652 100644 --- a/docs/reference/esql/functions/types/sub.asciidoc +++ b/docs/reference/esql/functions/types/sub.asciidoc @@ -8,4 +8,5 @@ double | double | double integer | integer | integer long | long | long time_duration | time_duration | time_duration +unsigned_long | unsigned_long | unsigned_long |=== diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java index eadea746a1bd1..3a0ab9403b841 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java @@ -58,6 +58,8 @@ public ExpressionEvaluator.Factory toEvaluator(Function foldingExceptionClass; + private String foldingExceptionMessage; + private final String expectedTypeError; private final boolean allTypesAreRepresentable; @@ -1141,6 +1144,14 @@ public String[] getExpectedWarnings() { return expectedWarnings; } + public Class foldingExceptionClass() { + return foldingExceptionClass; + } + + public String foldingExceptionMessage() { + return foldingExceptionMessage; + } + public String getExpectedTypeError() { return expectedTypeError; } @@ -1155,6 +1166,12 @@ public TestCase withWarning(String warning) { } return new TestCase(data, evaluatorToString, expectedType, matcher, newWarnings, expectedTypeError); } + + public TestCase withFoldingException(Class clazz, String message) { + foldingExceptionClass = clazz; + foldingExceptionMessage = message; + return this; + } } /** diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java index 1446fc54c99fa..3a6a5d8eabae3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java @@ -30,6 +30,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; public class DateExtractTests extends AbstractScalarFunctionTestCase { public DateExtractTests(@Name("TestCase") Supplier testCaseSupplier) { @@ -38,17 +39,39 @@ public DateExtractTests(@Name("TestCase") Supplier te @ParametersFactory public static Iterable parameters() { - return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("Date Extract Year", () -> { - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(new BytesRef("YEAR"), DataTypes.KEYWORD, "field"), - new TestCaseSupplier.TypedData(1687944333000L, DataTypes.DATETIME, "date") + return parameterSuppliersFromTypedData( + List.of( + new TestCaseSupplier( + List.of(DataTypes.KEYWORD, DataTypes.DATETIME), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("YeAr"), DataTypes.KEYWORD, "chrono"), + new TestCaseSupplier.TypedData(1687944333000L, DataTypes.DATETIME, "date") + ), + "DateExtractEvaluator[value=Attribute[channel=1], chronoField=Attribute[channel=0], zone=Z]", + DataTypes.LONG, + equalTo(2023L) + ) ), - "DateExtractEvaluator[value=Attribute[channel=1], chronoField=Attribute[channel=0], zone=Z]", - DataTypes.LONG, - equalTo(2023L) - ); - }))); + new TestCaseSupplier( + List.of(DataTypes.KEYWORD, DataTypes.DATETIME), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("not a unit"), DataTypes.KEYWORD, "chrono"), + new TestCaseSupplier.TypedData(0L, DataTypes.DATETIME, "date") + + ), + "DateExtractEvaluator[value=Attribute[channel=1], chronoField=Attribute[channel=0], zone=Z]", + DataTypes.LONG, + is(nullValue()) + ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.") + .withWarning( + "Line -1:-1: java.lang.IllegalArgumentException: No enum constant java.time.temporal.ChronoField.NOT A UNIT" + ) + .withFoldingException(InvalidArgumentException.class, "invalid date field for []: not a unit") + ) + ) + ); } public void testAllChronoFields() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java index 48d8079ace477..ae53f2e81d158 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java @@ -25,6 +25,8 @@ import java.util.function.Supplier; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.startsWith; public class DateParseTests extends AbstractScalarFunctionTestCase { @@ -59,6 +61,43 @@ public static Iterable parameters() { DataTypes.DATETIME, equalTo(1683244800000L) ) + ), + new TestCaseSupplier( + List.of(DataTypes.KEYWORD, DataTypes.KEYWORD), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("not a format"), DataTypes.KEYWORD, "second"), + new TestCaseSupplier.TypedData(new BytesRef("2023-05-05"), DataTypes.KEYWORD, "first") + + ), + "DateParseEvaluator[val=Attribute[channel=1], formatter=Attribute[channel=0], zoneId=Z]", + DataTypes.DATETIME, + is(nullValue()) + ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") + .withWarning( + "Line -1:-1: java.lang.IllegalArgumentException: Invalid format: [not a format]: Unknown pattern letter: o" + ) + .withFoldingException( + InvalidArgumentException.class, + "invalid date pattern for []: Invalid format: [not a format]: Unknown pattern letter: o" + ) + ), + new TestCaseSupplier( + List.of(DataTypes.KEYWORD, DataTypes.KEYWORD), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("yyyy-MM-dd"), DataTypes.KEYWORD, "second"), + new TestCaseSupplier.TypedData(new BytesRef("not a date"), DataTypes.KEYWORD, "first") + + ), + "DateParseEvaluator[val=Attribute[channel=1], formatter=Attribute[channel=0], zoneId=Z]", + DataTypes.DATETIME, + is(nullValue()) + ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.") + .withWarning( + "Line -1:-1: java.lang.IllegalArgumentException: " + + "failed to parse date field [not a date] with format [yyyy-MM-dd]" + ) ) ) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumTests.java index da6a7aec8462c..99d338f5b36bf 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumTests.java @@ -10,16 +10,22 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; import java.util.ArrayList; import java.util.List; +import java.util.Locale; import java.util.function.Supplier; +import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; public class MvSumTests extends AbstractMultivalueFunctionTestCase { public MvSumTests(@Name("TestCase") Supplier testCaseSupplier) { @@ -34,9 +40,51 @@ public static Iterable parameters() { // ints(cases, "mv_sum", "MvSum", (size, values) -> equalTo(values.sum())); // longs(cases, "mv_sum", "MvSum", (size, values) -> equalTo(values.sum())); // unsignedLongAsBigInteger(cases, "mv_sum", "MvSum", (size, values) -> equalTo(values.sum())); + + cases.add(arithmeticExceptionCase(DataTypes.INTEGER, () -> { + List data = randomList(1, 10, () -> randomIntBetween(0, Integer.MAX_VALUE)); + data.add(Integer.MAX_VALUE); + return data; + })); + cases.add(arithmeticExceptionCase(DataTypes.INTEGER, () -> { + List data = randomList(1, 10, () -> randomIntBetween(Integer.MIN_VALUE, 0)); + data.add(Integer.MIN_VALUE); + return data; + })); + cases.add(arithmeticExceptionCase(DataTypes.LONG, () -> { + List data = randomList(1, 10, () -> randomLongBetween(0L, Long.MAX_VALUE)); + data.add(Long.MAX_VALUE); + return data; + })); + cases.add(arithmeticExceptionCase(DataTypes.LONG, () -> { + List data = randomList(1, 10, () -> randomLongBetween(Long.MIN_VALUE, 0L)); + data.add(Long.MIN_VALUE); + return data; + })); + cases.add(arithmeticExceptionCase(DataTypes.UNSIGNED_LONG, () -> { + List data = randomList(1, 10, ESTestCase::randomLong); + data.add(asLongUnsigned(UNSIGNED_LONG_MAX)); + return data; + })); + return parameterSuppliersFromTypedData(cases); } + private static TestCaseSupplier arithmeticExceptionCase(DataType dataType, Supplier dataSupplier) { + String typeNameOverflow = dataType.typeName().toLowerCase(Locale.ROOT) + " overflow"; + return new TestCaseSupplier( + "<" + typeNameOverflow + ">", + List.of(dataType), + () -> new TestCaseSupplier.TestCase( + List.of(new TestCaseSupplier.TypedData(dataSupplier.get(), dataType, "field")), + "MvSum[field=Attribute[channel=0]]", + dataType, + is(nullValue()) + ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.") + .withWarning("Line -1:-1: java.lang.ArithmeticException: " + typeNameOverflow) + ); + } + @Override protected Expression build(Source source, Expression field) { return new MvSum(source, field); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceTests.java index 6e57a69720ca5..60268b9e27764 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceTests.java @@ -22,6 +22,7 @@ import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; +import java.util.regex.PatternSyntaxException; import static org.hamcrest.Matchers.equalTo; @@ -79,24 +80,23 @@ public static Iterable parameters() { ) ); - // a syntactically wrong regex should yield null. And a warning header - // but for now we are letting the exception pass through. See also https://github.com/elastic/elasticsearch/issues/100038 - // suppliers.add(new TestCaseSupplier("invalid_regex", () -> { - // String text = randomAlphaOfLength(10); - // String invalidRegex = "["; - // String newStr = randomAlphaOfLength(5); - // return new TestCaseSupplier.TestCase( - // List.of( - // new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.KEYWORD, "str"), - // new TestCaseSupplier.TypedData(new BytesRef(invalidRegex), DataTypes.KEYWORD, "oldStr"), - // new TestCaseSupplier.TypedData(new BytesRef(newStr), DataTypes.KEYWORD, "newStr") - // ), - // "ReplaceEvaluator[str=Attribute[channel=0], regex=Attribute[channel=1], newStr=Attribute[channel=2]]", - // DataTypes.KEYWORD, - // equalTo(null) - // ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") - // .withWarning("java.util.regex.PatternSyntaxException: Unclosed character class near index 0\r\n[\r\n^"); - // })); + suppliers.add(new TestCaseSupplier(List.of(DataTypes.KEYWORD, DataTypes.KEYWORD, DataTypes.KEYWORD), () -> { + String text = randomAlphaOfLength(10); + String invalidRegex = "["; + String newStr = randomAlphaOfLength(5); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.KEYWORD, "str"), + new TestCaseSupplier.TypedData(new BytesRef(invalidRegex), DataTypes.KEYWORD, "oldStr"), + new TestCaseSupplier.TypedData(new BytesRef(newStr), DataTypes.KEYWORD, "newStr") + ), + "ReplaceEvaluator[str=Attribute[channel=0], regex=Attribute[channel=1], newStr=Attribute[channel=2]]", + DataTypes.KEYWORD, + equalTo(null) + ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.") + .withWarning("Line -1:-1: java.util.regex.PatternSyntaxException: Unclosed character class near index 0\n[\n^") + .withFoldingException(PatternSyntaxException.class, "Unclosed character class near index 0\n[\n^"); + })); return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(false, suppliers))); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AbstractArithmeticTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AbstractArithmeticTestCase.java index 49233a19114c8..02005d51c96d5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AbstractArithmeticTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AbstractArithmeticTestCase.java @@ -17,10 +17,12 @@ import java.util.List; import java.util.Locale; +import java.util.function.Supplier; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; public abstract class AbstractArithmeticTestCase extends AbstractBinaryOperatorTestCase { protected Matcher resultMatcher(List data, DataType dataType) { @@ -119,4 +121,27 @@ protected DataType expectedType(DataType lhsType, DataType rhsType) { } throw new UnsupportedOperationException(); } + + static TestCaseSupplier arithmeticExceptionOverflowCase( + DataType dataType, + Supplier lhsSupplier, + Supplier rhsSupplier, + String evaluator + ) { + String typeNameOverflow = dataType.typeName().toLowerCase(Locale.ROOT) + " overflow"; + return new TestCaseSupplier( + "<" + typeNameOverflow + ">", + List.of(dataType), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(lhsSupplier.get(), dataType, "lhs"), + new TestCaseSupplier.TypedData(rhsSupplier.get(), dataType, "rhs") + ), + evaluator + "[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", + dataType, + is(nullValue()) + ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") + .withWarning("Line -1:-1: java.lang.ArithmeticException: " + typeNameOverflow) + ); + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java index 2280ad9a2b1fe..f60858bb49812 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java @@ -211,6 +211,47 @@ public static Iterable parameters() { ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.") .withWarning("Line -1:-1: java.lang.IllegalArgumentException: single-value function encountered multi-value"); }))); + // exact math arithmetic exceptions + suppliers.add( + arithmeticExceptionOverflowCase( + DataTypes.INTEGER, + () -> randomIntBetween(1, Integer.MAX_VALUE), + () -> Integer.MAX_VALUE, + "AddIntsEvaluator" + ) + ); + suppliers.add( + arithmeticExceptionOverflowCase( + DataTypes.INTEGER, + () -> randomIntBetween(Integer.MIN_VALUE, -1), + () -> Integer.MIN_VALUE, + "AddIntsEvaluator" + ) + ); + suppliers.add( + arithmeticExceptionOverflowCase( + DataTypes.LONG, + () -> randomLongBetween(1L, Long.MAX_VALUE), + () -> Long.MAX_VALUE, + "AddLongsEvaluator" + ) + ); + suppliers.add( + arithmeticExceptionOverflowCase( + DataTypes.LONG, + () -> randomLongBetween(Long.MIN_VALUE, -1L), + () -> Long.MIN_VALUE, + "AddLongsEvaluator" + ) + ); + suppliers.add( + arithmeticExceptionOverflowCase( + DataTypes.UNSIGNED_LONG, + () -> asLongUnsigned(randomBigInteger()), + () -> asLongUnsigned(UNSIGNED_LONG_MAX), + "AddUnsignedLongsEvaluator" + ) + ); return parameterSuppliersFromTypedData(suppliers); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java index 82b6bbda276b6..4aa8786f2cd69 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java @@ -10,15 +10,19 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.compute.data.Block; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; import java.math.BigInteger; import java.util.List; import java.util.function.Supplier; +import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; +import static org.elasticsearch.xpack.ql.util.NumericUtils.ZERO_AS_UNSIGNED_LONG; import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsBigInteger; import static org.hamcrest.Matchers.equalTo; @@ -95,6 +99,27 @@ public static Iterable parameters() { )); } + // run dedicated test to avoid the JVM optimized ArithmeticException that lacks a message + public void testDivisionByZero() { + DataType testCaseType = testCase.getData().get(0).type(); + List data = switch (testCaseType.typeName()) { + case "INTEGER" -> List.of(randomInt(), 0); + case "LONG" -> List.of(randomLong(), 0L); + case "UNSIGNED_LONG" -> List.of(randomLong(), ZERO_AS_UNSIGNED_LONG); + default -> null; + }; + if (data != null) { + var op = build(Source.EMPTY, field("lhs", testCaseType), field("rhs", testCaseType)); + try (Block block = evaluator(op).get(driverContext()).eval(row(data))) { + assertCriticalWarnings( + "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", + "Line -1:-1: java.lang.ArithmeticException: / by zero" + ); + assertNull(toJavaObject(block, 0)); + } + } + } + @Override protected boolean rhsOk(Object o) { if (o instanceof Number n) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java index 425ef2bb11a6b..5beaf0b782af7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java @@ -10,15 +10,19 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.compute.data.Block; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; import java.math.BigInteger; import java.util.List; import java.util.function.Supplier; +import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; +import static org.elasticsearch.xpack.ql.util.NumericUtils.ZERO_AS_UNSIGNED_LONG; import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsBigInteger; import static org.hamcrest.Matchers.equalTo; @@ -95,6 +99,27 @@ public static Iterable parameters() { )); } + // run dedicated test to avoid the JVM optimized ArithmeticException that lacks a message + public void testDivisionByZero() { + DataType testCaseType = testCase.getData().get(0).type(); + List data = switch (testCaseType.typeName()) { + case "INTEGER" -> List.of(randomInt(), 0); + case "LONG" -> List.of(randomLong(), 0L); + case "UNSIGNED_LONG" -> List.of(randomLong(), ZERO_AS_UNSIGNED_LONG); + default -> null; + }; + if (data != null) { + var op = build(Source.EMPTY, field("lhs", testCaseType), field("rhs", testCaseType)); + try (Block block = evaluator(op).get(driverContext()).eval(row(data))) { + assertCriticalWarnings( + "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", + "Line -1:-1: java.lang.ArithmeticException: / by zero" + ); + assertNull(toJavaObject(block, 0)); + } + } + } + @Override protected boolean rhsOk(Object o) { if (o instanceof Number n) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulTests.java index 2ab72ebf9d5f6..839a554353e08 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulTests.java @@ -68,20 +68,38 @@ public static Iterable parameters() { DataTypes.DOUBLE, equalTo(lhs * rhs) ); - })/*, new TestCaseSupplier("ULong * ULong", () -> { - // Ensure we don't have an overflow - long rhs = randomLongBetween(0, 1024); - long lhs = randomLongBetween(0, 1024); - BigInteger lhsBI = unsignedLongAsBigInteger(lhs); - BigInteger rhsBI = unsignedLongAsBigInteger(rhs); - return new TestCase( - Source.EMPTY, - List.of(new TypedData(lhs, DataTypes.UNSIGNED_LONG, "lhs"), new TypedData(rhs, DataTypes.UNSIGNED_LONG, "rhs")), - "MulUnsignedLongsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", - equalTo(asLongUnsigned(lhsBI.multiply(rhsBI).longValue())) - ); - }) - */ + }), /* new TestCaseSupplier("ULong * ULong", () -> { + // Ensure we don't have an overflow + long rhs = randomLongBetween(0, 1024); + long lhs = randomLongBetween(0, 1024); + BigInteger lhsBI = unsignedLongAsBigInteger(lhs); + BigInteger rhsBI = unsignedLongAsBigInteger(rhs); + return new TestCase( + Source.EMPTY, + List.of(new TypedData(lhs, DataTypes.UNSIGNED_LONG, "lhs"), new TypedData(rhs, DataTypes.UNSIGNED_LONG, "rhs")), + "MulUnsignedLongsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", + equalTo(asLongUnsigned(lhsBI.multiply(rhsBI).longValue())) + ); + }) + */ + arithmeticExceptionOverflowCase( + DataTypes.INTEGER, + () -> randomBoolean() ? Integer.MIN_VALUE : Integer.MAX_VALUE, + () -> randomIntBetween(2, Integer.MAX_VALUE), + "MulIntsEvaluator" + ), + arithmeticExceptionOverflowCase( + DataTypes.LONG, + () -> randomBoolean() ? 
Long.MIN_VALUE : Long.MAX_VALUE, + () -> randomLongBetween(2L, Long.MAX_VALUE), + "MulLongsEvaluator" + ), + arithmeticExceptionOverflowCase( + DataTypes.UNSIGNED_LONG, + () -> asLongUnsigned(UNSIGNED_LONG_MAX), + () -> asLongUnsigned(randomLongBetween(-Long.MAX_VALUE, Long.MAX_VALUE)), + "MulUnsignedLongsEvaluator" + ) )); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java index b2f54e4d2400c..47763af74e150 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java @@ -30,6 +30,7 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.isNull; import static org.elasticsearch.xpack.ql.type.DateUtils.asDateTime; import static org.elasticsearch.xpack.ql.type.DateUtils.asMillis; +import static org.elasticsearch.xpack.ql.util.NumericUtils.ZERO_AS_UNSIGNED_LONG; import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsBigInteger; import static org.hamcrest.Matchers.equalTo; @@ -158,7 +159,39 @@ public static Iterable parameters() { is(nullValue()) ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") .withWarning("Line -1:-1: java.lang.IllegalArgumentException: single-value function encountered multi-value"); - }))); + }), + // exact math arithmetic exceptions + arithmeticExceptionOverflowCase( + DataTypes.INTEGER, + () -> Integer.MIN_VALUE, + () -> randomIntBetween(1, Integer.MAX_VALUE), + "SubIntsEvaluator" + ), + arithmeticExceptionOverflowCase( + DataTypes.INTEGER, + () -> randomIntBetween(Integer.MIN_VALUE, -2), + () -> Integer.MAX_VALUE, + "SubIntsEvaluator" + ), + arithmeticExceptionOverflowCase( + DataTypes.LONG, + () -> Long.MIN_VALUE, + () -> randomLongBetween(1L, Long.MAX_VALUE), + "SubLongsEvaluator" + ), + arithmeticExceptionOverflowCase( + DataTypes.LONG, + () -> randomLongBetween(Long.MIN_VALUE, -2L), + () -> Long.MAX_VALUE, + "SubLongsEvaluator" + ), + arithmeticExceptionOverflowCase( + DataTypes.UNSIGNED_LONG, + () -> ZERO_AS_UNSIGNED_LONG, + () -> randomLongBetween(-Long.MAX_VALUE, Long.MAX_VALUE), + "SubUnsignedLongsEvaluator" + ) + )); } @Override From 11331e9f6bc5f756e97c1247e2293d34c1d87c6e Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Wed, 10 Jan 2024 14:05:12 -0800 Subject: [PATCH 22/75] Mute DfsProfilerIT.testProfileDfs --- .../java/org/elasticsearch/search/profile/dfs/DfsProfilerIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/dfs/DfsProfilerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/dfs/DfsProfilerIT.java index 7e504a100ba56..65393f4185ce8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/dfs/DfsProfilerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/dfs/DfsProfilerIT.java @@ -39,6 +39,7 @@ public class DfsProfilerIT extends ESIntegTestCase { private static final int KNN_DIM = 3; + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104235") public void testProfileDfs() throws Exception { String textField = "text_field"; String 
numericField = "number"; From 60254575f013951c7f8cfe9e842bd078a43c8dfe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Slobodan=20Adamovi=C4=87?= Date: Wed, 10 Jan 2024 23:11:18 +0100 Subject: [PATCH 23/75] Expose token authentication metrics (#104142) This PR adds metrics for recording successful and failed authentications for OAuth2 tokens. Exposed metrics are: - `es.security.authc.token.success.total` - `es.security.authc.token.failures.total` - `es.security.authc.token.time` --- docs/changelog/104142.yaml | 5 + .../security/authc/AuthenticationService.java | 2 +- .../authc/OAuth2TokenAuthenticator.java | 34 ++++- .../xpack/security/authc/TokenService.java | 2 +- .../security/metric/SecurityMetricType.java | 11 ++ .../authc/OAuth2TokenAuthenticatorTests.java | 140 ++++++++++++++++++ 6 files changed, 191 insertions(+), 3 deletions(-) create mode 100644 docs/changelog/104142.yaml create mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/OAuth2TokenAuthenticatorTests.java diff --git a/docs/changelog/104142.yaml b/docs/changelog/104142.yaml new file mode 100644 index 0000000000000..08bf9ef759090 --- /dev/null +++ b/docs/changelog/104142.yaml @@ -0,0 +1,5 @@ +pr: 104142 +summary: Expose token authentication metrics +area: Authentication +type: enhancement +issues: [] diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java index 7dbe402cea046..06883d9423387 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java @@ -112,7 +112,7 @@ public AuthenticationService( anonymousUser, new AuthenticationContextSerializer(), new ServiceAccountAuthenticator(serviceAccountService, nodeName, meterRegistry), - new OAuth2TokenAuthenticator(tokenService), + new OAuth2TokenAuthenticator(tokenService, meterRegistry), new ApiKeyAuthenticator(apiKeyService, nodeName, meterRegistry), new RealmsAuthenticator(numInvalidation, lastSuccessfulAuthCache) ); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/OAuth2TokenAuthenticator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/OAuth2TokenAuthenticator.java index 6f50cd1f1db1d..ffe6f83a37b2f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/OAuth2TokenAuthenticator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/OAuth2TokenAuthenticator.java @@ -12,17 +12,38 @@ import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; import org.elasticsearch.xpack.core.security.authc.AuthenticationToken; import org.elasticsearch.xpack.core.security.authc.support.BearerToken; +import org.elasticsearch.xpack.security.metric.InstrumentedSecurityActionListener; +import org.elasticsearch.xpack.security.metric.SecurityMetricType; +import org.elasticsearch.xpack.security.metric.SecurityMetrics; + +import java.util.Map; +import java.util.function.LongSupplier; class OAuth2TokenAuthenticator 
implements Authenticator { + public static final String ATTRIBUTE_AUTHC_FAILURE_REASON = "es.security.token_authc_failure_reason"; + private static final Logger logger = LogManager.getLogger(OAuth2TokenAuthenticator.class); + + private final SecurityMetrics authenticationMetrics; private final TokenService tokenService; - OAuth2TokenAuthenticator(TokenService tokenService) { + OAuth2TokenAuthenticator(TokenService tokenService, MeterRegistry meterRegistry) { + this(tokenService, meterRegistry, System::nanoTime); + } + + OAuth2TokenAuthenticator(TokenService tokenService, MeterRegistry meterRegistry, LongSupplier nanoTimeSupplier) { + this.authenticationMetrics = new SecurityMetrics<>( + SecurityMetricType.AUTHC_OAUTH2_TOKEN, + meterRegistry, + this::buildMetricAttributes, + nanoTimeSupplier + ); this.tokenService = tokenService; } @@ -45,6 +66,10 @@ public void authenticate(Context context, ActionListener> listener) { tokenService.tryAuthenticateToken(bearerToken.credentials(), ActionListener.wrap(userToken -> { if (userToken != null) { listener.onResponse(AuthenticationResult.success(userToken.getAuthentication())); @@ -62,4 +87,11 @@ public void authenticate(Context context, ActionListener buildMetricAttributes(BearerToken token, String failureReason) { + if (failureReason != null) { + return Map.of(ATTRIBUTE_AUTHC_FAILURE_REASON, failureReason); + } + return Map.of(); + } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java index 9c378e0e1156e..0bf0ab565d015 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java @@ -154,7 +154,7 @@ * Service responsible for the creation, validation, and other management of {@link UserToken} * objects for authentication */ -public final class TokenService { +public class TokenService { /** * The parameters below are used to generate the cryptographic key that is used to encrypt the diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/SecurityMetricType.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/SecurityMetricType.java index d6a85b4e9ddf6..a77207c8e5677 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/SecurityMetricType.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/SecurityMetricType.java @@ -38,6 +38,17 @@ public enum SecurityMetricType { ) ), + AUTHC_OAUTH2_TOKEN( + SecurityMetricGroup.AUTHC, + new SecurityMetricInfo("es.security.authc.token.success.total", "Number of successful OAuth2 token authentications.", "count"), + new SecurityMetricInfo("es.security.authc.token.failures.total", "Number of failed OAuth2 token authentications.", "count"), + new SecurityMetricInfo( + "es.security.authc.token.time", + "Time it took (in nanoseconds) to execute OAuth2 token authentication.", + "ns" + ) + ), + ; private final SecurityMetricGroup group; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/OAuth2TokenAuthenticatorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/OAuth2TokenAuthenticatorTests.java new file mode 100644 index 0000000000000..e977c32565893 --- /dev/null +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/OAuth2TokenAuthenticatorTests.java @@ -0,0 +1,140 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security.authc; + +import org.elasticsearch.ElasticsearchSecurityException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.telemetry.TestTelemetryPlugin; +import org.elasticsearch.xpack.core.security.authc.Authentication; +import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; +import org.elasticsearch.xpack.core.security.authc.AuthenticationTestHelper; +import org.elasticsearch.xpack.core.security.authc.support.BearerToken; +import org.elasticsearch.xpack.security.metric.SecurityMetricType; + +import java.time.Clock; +import java.time.Instant; +import java.util.Map; + +import static org.elasticsearch.test.ActionListenerUtils.anyActionListener; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.sameInstance; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.same; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class OAuth2TokenAuthenticatorTests extends AbstractAuthenticatorTests { + + public void testRecordingSuccessfulAuthenticationMetrics() { + final TestTelemetryPlugin telemetryPlugin = new TestTelemetryPlugin(); + final long initialNanoTime = randomLongBetween(0, 100); + final TestNanoTimeSupplier nanoTimeSupplier = new TestNanoTimeSupplier(initialNanoTime); + final TokenService tokenService = mock(TokenService.class); + final OAuth2TokenAuthenticator oauth2Authenticator = new OAuth2TokenAuthenticator( + tokenService, + telemetryPlugin.getTelemetryProvider(Settings.EMPTY).getMeterRegistry(), + nanoTimeSupplier + ); + + final BearerToken bearerToken = randomBearerToken(); + final Authenticator.Context context = mockAuthenticatorContext(bearerToken); + + final long executionTimeInNanos = randomLongBetween(0, 500); + doAnswer(invocation -> { + nanoTimeSupplier.advanceTime(executionTimeInNanos); + final ActionListener listener = invocation.getArgument(1); + final Authentication authentication = AuthenticationTestHelper.builder() + .user(AuthenticationTestHelper.randomUser()) + .realmRef(AuthenticationTestHelper.randomRealmRef()) + .build(false); + final int seconds = randomIntBetween(0, Math.toIntExact(TimeValue.timeValueMinutes(30L).getSeconds())); + final Instant expirationTime = Clock.systemUTC().instant().plusSeconds(seconds); + final UserToken userToken = new UserToken(authentication, expirationTime); + listener.onResponse(userToken); + return Void.TYPE; + }).when(tokenService).tryAuthenticateToken(any(SecureString.class), anyActionListener()); + + final PlainActionFuture> future = new PlainActionFuture<>(); + oauth2Authenticator.authenticate(context, future); + var authResult = future.actionGet(); + assertThat(authResult.isAuthenticated(), 
equalTo(true)); + + // verify we recorded success metric + assertSingleSuccessAuthMetric(telemetryPlugin, SecurityMetricType.AUTHC_OAUTH2_TOKEN, Map.of()); + + // verify that there were no failures recorded + assertZeroFailedAuthMetrics(telemetryPlugin, SecurityMetricType.AUTHC_OAUTH2_TOKEN); + + // verify we recorded authentication time + assertAuthenticationTimeMetric(telemetryPlugin, SecurityMetricType.AUTHC_OAUTH2_TOKEN, executionTimeInNanos, Map.of()); + } + + public void testRecordingFailedAuthenticationMetrics() { + final TestTelemetryPlugin telemetryPlugin = new TestTelemetryPlugin(); + final long initialNanoTime = randomLongBetween(0, 100); + final TestNanoTimeSupplier nanoTimeSupplier = new TestNanoTimeSupplier(initialNanoTime); + final TokenService tokenService = mock(TokenService.class); + final OAuth2TokenAuthenticator oauth2Authenticator = new OAuth2TokenAuthenticator( + tokenService, + telemetryPlugin.getTelemetryProvider(Settings.EMPTY).getMeterRegistry(), + nanoTimeSupplier + ); + + final BearerToken bearerToken = randomBearerToken(); + final Authenticator.Context context = mockAuthenticatorContext(bearerToken); + + var failureError = new ElasticsearchSecurityException("failed to authenticate OAuth2 token", RestStatus.UNAUTHORIZED); + when(context.getRequest().exceptionProcessingRequest(same(failureError), any())).thenReturn(failureError); + + final long executionTimeInNanos = randomLongBetween(0, 500); + doAnswer(invocation -> { + nanoTimeSupplier.advanceTime(executionTimeInNanos); + final ActionListener listener = invocation.getArgument(1); + listener.onFailure(failureError); + return Void.TYPE; + }).when(tokenService).tryAuthenticateToken(any(SecureString.class), anyActionListener()); + + final PlainActionFuture> future = new PlainActionFuture<>(); + oauth2Authenticator.authenticate(context, future); + var e = expectThrows(ElasticsearchSecurityException.class, future::actionGet); + assertThat(e, sameInstance(failureError)); + + // verify we recorded failure metric + assertSingleFailedAuthMetric( + telemetryPlugin, + SecurityMetricType.AUTHC_OAUTH2_TOKEN, + Map.ofEntries(Map.entry(OAuth2TokenAuthenticator.ATTRIBUTE_AUTHC_FAILURE_REASON, "failed to authenticate OAuth2 token")) + ); + + // verify that there were no successes recorded + assertZeroSuccessAuthMetrics(telemetryPlugin, SecurityMetricType.AUTHC_OAUTH2_TOKEN); + + // verify we recorded authentication time + assertAuthenticationTimeMetric(telemetryPlugin, SecurityMetricType.AUTHC_OAUTH2_TOKEN, executionTimeInNanos, Map.of()); + } + + private static BearerToken randomBearerToken() { + return new BearerToken(new SecureString(randomAlphaOfLengthBetween(5, 10).toCharArray())); + } + + private Authenticator.Context mockAuthenticatorContext(BearerToken token) { + final Authenticator.Context context = mock(Authenticator.Context.class); + when(context.getMostRecentAuthenticationToken()).thenReturn(token); + when(context.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); + final AuthenticationService.AuditableRequest auditableRequest = mock(AuthenticationService.AuditableRequest.class); + when(context.getRequest()).thenReturn(auditableRequest); + return context; + } +} From 142d4df606771f8b1e28c22744092afa501a4e84 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Wed, 10 Jan 2024 14:37:38 -0800 Subject: [PATCH 24/75] Simplify handling of runtime java for tests clusters (#104232) --- .../groovy/elasticsearch.runtime-jdk-provision.gradle | 9 --------- .../gradle/internal/test/rest/RestTestBasePlugin.java | 2 
-- .../qa/die_with_dignity/DieWithDignityIT.java | 2 +- .../test/cluster/local/DefaultEnvironmentProvider.java | 3 +-- 4 files changed, 2 insertions(+), 14 deletions(-) diff --git a/build-tools-internal/src/main/groovy/elasticsearch.runtime-jdk-provision.gradle b/build-tools-internal/src/main/groovy/elasticsearch.runtime-jdk-provision.gradle index 7c7c05facb2e1..f85ceed18604b 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.runtime-jdk-provision.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.runtime-jdk-provision.gradle @@ -47,15 +47,6 @@ configure(allprojects) { } } } - - project.plugins.withType(RestTestBasePlugin) { - tasks.withType(StandaloneRestIntegTestTask).configureEach { - if (BuildParams.getIsRuntimeJavaHomeSet() == false) { - nonInputProperties.systemProperty("tests.runtime.java", "${-> launcher.map { it.metadata.installationPath.asFile.path }.get()}") - } - } - } - project.plugins.withType(ThirdPartyAuditPrecommitPlugin) { project.getTasks().withType(ThirdPartyAuditTask.class).configureEach { if (BuildParams.getIsRuntimeJavaHomeSet() == false) { diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java index b51842bbdcbf7..6d43ad109c323 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java @@ -63,7 +63,6 @@ public class RestTestBasePlugin implements Plugin { private static final String TESTS_MAX_PARALLEL_FORKS_SYSPROP = "tests.max.parallel.forks"; - private static final String TESTS_RUNTIME_JAVA_SYSPROP = "tests.runtime.java"; private static final String DEFAULT_DISTRIBUTION_SYSPROP = "tests.default.distribution"; private static final String INTEG_TEST_DISTRIBUTION_SYSPROP = "tests.integ-test.distribution"; private static final String BWC_SNAPSHOT_DISTRIBUTION_SYSPROP_PREFIX = "tests.snapshot.distribution."; @@ -189,7 +188,6 @@ public void apply(Project project) { // Wire up integ-test distribution by default for all test tasks FileCollection extracted = integTestDistro.getExtracted(); nonInputSystemProperties.systemProperty(INTEG_TEST_DISTRIBUTION_SYSPROP, () -> extracted.getSingleFile().getPath()); - nonInputSystemProperties.systemProperty(TESTS_RUNTIME_JAVA_SYSPROP, BuildParams.getRuntimeJavaHome()); // Add `usesDefaultDistribution()` extension method to test tasks to indicate they require the default distro task.getExtensions().getExtraProperties().set("usesDefaultDistribution", new Closure(task) { diff --git a/test/external-modules/die-with-dignity/src/javaRestTest/java/org/elasticsearch/qa/die_with_dignity/DieWithDignityIT.java b/test/external-modules/die-with-dignity/src/javaRestTest/java/org/elasticsearch/qa/die_with_dignity/DieWithDignityIT.java index b313b87fc2153..a176a962adc80 100644 --- a/test/external-modules/die-with-dignity/src/javaRestTest/java/org/elasticsearch/qa/die_with_dignity/DieWithDignityIT.java +++ b/test/external-modules/die-with-dignity/src/javaRestTest/java/org/elasticsearch/qa/die_with_dignity/DieWithDignityIT.java @@ -77,7 +77,7 @@ public void testDieWithDignity() throws Exception { } private Process startJcmd(long pid) throws IOException { - final String jcmdPath = PathUtils.get(System.getProperty("tests.runtime.java"), "bin/jcmd").toString(); + final String jcmdPath = 
PathUtils.get(System.getProperty("java.home"), "bin/jcmd").toString(); return new ProcessBuilder().command(jcmdPath, Long.toString(pid), "VM.command_line").redirectErrorStream(true).start(); } diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultEnvironmentProvider.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultEnvironmentProvider.java index aa3982e076ae0..fc45325008ba4 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultEnvironmentProvider.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultEnvironmentProvider.java @@ -20,7 +20,6 @@ public class DefaultEnvironmentProvider implements EnvironmentProvider { private static final String HOSTNAME_OVERRIDE = "LinuxDarwinHostname"; private static final String COMPUTERNAME_OVERRIDE = "WindowsComputername"; - private static final String TESTS_RUNTIME_JAVA_SYSPROP = "tests.runtime.java"; @Override public Map get(LocalNodeSpec nodeSpec) { @@ -28,7 +27,7 @@ public Map get(LocalNodeSpec nodeSpec) { // If we are testing the current version of Elasticsearch, use the configured runtime Java, otherwise use the bundled JDK if (nodeSpec.getDistributionType() == DistributionType.INTEG_TEST || nodeSpec.getVersion().equals(Version.CURRENT)) { - environment.put("ES_JAVA_HOME", System.getProperty(TESTS_RUNTIME_JAVA_SYSPROP)); + environment.put("ES_JAVA_HOME", System.getProperty("java.home")); } // Override the system hostname variables for testing From 6be0ab301f2fbbe806834a7bf89109841d423a87 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Wed, 10 Jan 2024 14:48:55 -0800 Subject: [PATCH 25/75] Mute TransformChainIT --- .../xpack/transform/integration/TransformChainIT.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformChainIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformChainIT.java index 450238b95e26e..b73adea63a223 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformChainIT.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformChainIT.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.transform.integration; +import org.apache.lucene.tests.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.common.Strings; @@ -25,6 +26,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; +@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104238") public class TransformChainIT extends TransformRestTestCase { private static final String DEST_INDEX_TEMPLATE = """ From 321f26c01f5e4b36b2c33a84dbaf2d747990c9f6 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Wed, 10 Jan 2024 15:26:11 -0800 Subject: [PATCH 26/75] Enable feature flag in node shutdown full cluster restart tests (#104219) This should resolve test failures in release builds like [this 
one](https://gradle-enterprise.elastic.co/s/twbb3r777mhlo/tests/task/:x-pack:plugin:shutdown:qa:full-cluster-restart:v8.12.0%23bwcTest/details/org.elasticsearch.xpack.restart.FullClusterRestartIT/testNodeShutdown%20%7Bcluster%3DUPGRADED%7D?page=eyJvdXRwdXQiOnsiMCI6NX19&top-execution=1). --- .../org/elasticsearch/xpack/restart/FullClusterRestartIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/shutdown/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/plugin/shutdown/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index 7a90907b9cf39..f45231923dee5 100644 --- a/x-pack/plugin/shutdown/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/plugin/shutdown/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -64,6 +64,7 @@ public class FullClusterRestartIT extends ParameterizedFullClusterRestartTestCas .keystore("xpack.watcher.encryption_key", Resource.fromClasspath("system_key")) .keystore("xpack.security.transport.ssl.secure_key_passphrase", "testnode") .feature(FeatureFlag.TIME_SERIES_MODE) + .feature(FeatureFlag.FAILURE_STORE_ENABLED) .build(); public FullClusterRestartIT(@Name("cluster") FullClusterRestartUpgradeStatus upgradeStatus) { From 61ff4bb79a48ad69601281c6cc0da65a6562e5e9 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Wed, 10 Jan 2024 16:57:12 -0800 Subject: [PATCH 27/75] AwaitsFix #104241 --- .../org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java index 2418d1104d244..25e8dc7b220c0 100644 --- a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java +++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java @@ -219,6 +219,7 @@ public void testManyConcat() throws IOException { /** * Hits a circuit breaker by building many moderately long strings. 
*/ + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/104241") public void testHugeManyConcat() throws IOException { initManyLongs(); assertCircuitBreaks(() -> manyConcat(2000)); From 1e1d15129a51a5a4ec24c4a862b5313ff149d38b Mon Sep 17 00:00:00 2001 From: Samuel Nelson Date: Thu, 11 Jan 2024 14:46:25 +1300 Subject: [PATCH 28/75] Include user's privileges actions in IdP plugin `_has_privileges` request (#104026) * Include user's privileges actions in IdP plugin has privileges request * Update docs/changelog/104026.yaml * Use `GroupedActionListener` instead of nested listeners Co-authored-by: Tim Vernum * Fixes after applying review suggestion * Fix IT flakiness --------- Co-authored-by: Tim Vernum --- docs/changelog/104026.yaml | 5 ++ .../idp/IdentityProviderAuthenticationIT.java | 6 +- .../src/javaRestTest/resources/roles.yml | 2 +- .../idp/privileges/UserPrivilegeResolver.java | 27 +++++- .../UserPrivilegeResolverTests.java | 88 +++++++++++++++++-- 5 files changed, 117 insertions(+), 11 deletions(-) create mode 100644 docs/changelog/104026.yaml diff --git a/docs/changelog/104026.yaml b/docs/changelog/104026.yaml new file mode 100644 index 0000000000000..d9aa704de1dbd --- /dev/null +++ b/docs/changelog/104026.yaml @@ -0,0 +1,5 @@ +pr: 104026 +summary: Include user's privileges actions in IdP plugin `_has_privileges` request +area: IdentityProvider +type: enhancement +issues: [] diff --git a/x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/java/org/elasticsearch/xpack/idp/IdentityProviderAuthenticationIT.java b/x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/java/org/elasticsearch/xpack/idp/IdentityProviderAuthenticationIT.java index b8565bc4ff898..c065e8d7e1d12 100644 --- a/x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/java/org/elasticsearch/xpack/idp/IdentityProviderAuthenticationIT.java +++ b/x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/java/org/elasticsearch/xpack/idp/IdentityProviderAuthenticationIT.java @@ -30,7 +30,7 @@ import java.util.Map; import java.util.Set; -import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; @@ -185,8 +185,8 @@ private void authenticateWithSamlResponse(String samlResponse, @Nullable String equalTo("urn:oasis:names:tc:SAML:2.0:nameid-format:transient") ); assertThat(ObjectPath.eval("metadata.saml_roles", authMap), instanceOf(List.class)); - assertThat(ObjectPath.eval("metadata.saml_roles", authMap), hasSize(1)); - assertThat(ObjectPath.eval("metadata.saml_roles", authMap), contains("viewer")); + assertThat(ObjectPath.eval("metadata.saml_roles", authMap), hasSize(2)); + assertThat(ObjectPath.eval("metadata.saml_roles", authMap), containsInAnyOrder("viewer", "custom")); } } diff --git a/x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/resources/roles.yml b/x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/resources/roles.yml index 0867c806f3126..174d92a033b42 100644 --- a/x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/resources/roles.yml +++ b/x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/resources/roles.yml @@ -8,4 +8,4 @@ idp_user: applications: - application: elastic-cloud resources: ["ec:123456:abcdefg"] - privileges: ["sso:viewer"] + privileges: ["sso:viewer", "sso:custom"] diff --git 
a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/privileges/UserPrivilegeResolver.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/privileges/UserPrivilegeResolver.java index c4ffe65feae5a..e2ddfc98bcc14 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/privileges/UserPrivilegeResolver.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/privileges/UserPrivilegeResolver.java @@ -10,15 +10,20 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.GroupedActionListener; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.xpack.core.security.SecurityContext; +import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesAction; +import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesRequest; +import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesRequestBuilder; import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesAction; import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesRequest; import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesResponse; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.permission.ResourcePrivileges; +import java.util.Arrays; import java.util.Map; import java.util.Objects; import java.util.Set; @@ -128,7 +133,8 @@ private void buildResourcePrivilege( ServiceProviderPrivileges service, ActionListener listener ) { - actionsResolver.getActions(service.getApplicationName(), listener.delegateFailureAndWrap((delegate, actions) -> { + var groupedListener = new GroupedActionListener>(2, listener.delegateFailureAndWrap((delegate, actionSets) -> { + final Set actions = actionSets.stream().flatMap(Set::stream).collect(Collectors.toUnmodifiableSet()); if (actions == null || actions.isEmpty()) { logger.warn("No application-privilege actions defined for application [{}]", service.getApplicationName()); delegate.onResponse(null); @@ -141,5 +147,24 @@ private void buildResourcePrivilege( delegate.onResponse(builder.build()); } })); + + // We need to enumerate possible actions that might be authorized for the user. Here we combine actions that + // have been granted to the user via roles and other actions that are registered privileges for the given + // application. 
These actions will be checked by a has-privileges check above + final GetUserPrivilegesRequest request = new GetUserPrivilegesRequestBuilder(client).username(securityContext.getUser().principal()) + .request(); + client.execute( + GetUserPrivilegesAction.INSTANCE, + request, + groupedListener.map( + userPrivileges -> userPrivileges.getApplicationPrivileges() + .stream() + .filter(appPriv -> appPriv.getApplication().equals(service.getApplicationName())) + .map(appPriv -> appPriv.getPrivileges()) + .flatMap(Arrays::stream) + .collect(Collectors.toUnmodifiableSet()) + ) + ); + actionsResolver.getActions(service.getApplicationName(), groupedListener); } } diff --git a/x-pack/plugin/identity-provider/src/test/java/org/elasticsearch/xpack/idp/privileges/UserPrivilegeResolverTests.java b/x-pack/plugin/identity-provider/src/test/java/org/elasticsearch/xpack/idp/privileges/UserPrivilegeResolverTests.java index 621c9f764de56..7b569e405732f 100644 --- a/x-pack/plugin/identity-provider/src/test/java/org/elasticsearch/xpack/idp/privileges/UserPrivilegeResolverTests.java +++ b/x-pack/plugin/identity-provider/src/test/java/org/elasticsearch/xpack/idp/privileges/UserPrivilegeResolverTests.java @@ -17,16 +17,20 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.security.SecurityContext; +import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesAction; +import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesRequest; +import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesResponse; import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesAction; import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesRequest; import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesResponse; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.AuthenticationTestHelper; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.permission.ResourcePrivileges; import org.elasticsearch.xpack.core.security.user.User; import org.junit.Before; -import org.mockito.Mockito; +import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.Map; @@ -50,11 +54,14 @@ public class UserPrivilegeResolverTests extends ESTestCase { private SecurityContext securityContext; private UserPrivilegeResolver resolver; + private String app; + @Before @SuppressWarnings("unchecked") public void setupTest() { client = mock(Client.class); securityContext = new SecurityContext(Settings.EMPTY, new ThreadContext(Settings.EMPTY)); + app = randomAlphaOfLengthBetween(3, 8); final ApplicationActionsResolver actionsResolver = mock(ApplicationActionsResolver.class); doAnswer(inv -> { final Object[] args = inv.getArguments(); @@ -63,12 +70,41 @@ public void setupTest() { listener.onResponse(Set.of("role:cluster:view", "role:cluster:admin", "role:cluster:operator", "role:cluster:monitor")); return null; }).when(actionsResolver).getActions(anyString(), any(ActionListener.class)); + doAnswer(inv -> { + final Object[] args = inv.getArguments(); + assertThat(args, arrayWithSize(3)); + ActionListener listener = (ActionListener) args[args.length - 1]; + RoleDescriptor.ApplicationResourcePrivileges appPriv1 = RoleDescriptor.ApplicationResourcePrivileges.builder() + .application(app) + .resources("resource1") + .privileges("role:extra1") + .build(); + 
RoleDescriptor.ApplicationResourcePrivileges appPriv2 = RoleDescriptor.ApplicationResourcePrivileges.builder() + .application(app) + .resources("resource1") + .privileges("role:extra2", "role:extra3") + .build(); + RoleDescriptor.ApplicationResourcePrivileges discardedAppPriv = RoleDescriptor.ApplicationResourcePrivileges.builder() + .application(randomAlphaOfLengthBetween(3, 8)) + .resources("resource1") + .privileges("role:discarded") + .build(); + GetUserPrivilegesResponse response = new GetUserPrivilegesResponse( + Set.of(), + Set.of(), + Set.of(), + Set.of(appPriv1, appPriv2, discardedAppPriv), + Set.of(), + Set.of() + ); + listener.onResponse(response); + return null; + }).when(client).execute(same(GetUserPrivilegesAction.INSTANCE), any(GetUserPrivilegesRequest.class), any(ActionListener.class)); resolver = new UserPrivilegeResolver(client, securityContext, actionsResolver); } public void testResolveZeroAccess() throws Exception { final String username = randomAlphaOfLengthBetween(4, 12); - final String app = randomAlphaOfLengthBetween(3, 8); setupUser(username, () -> { setupHasPrivileges(username, app); final PlainActionFuture future = new PlainActionFuture<>(); @@ -93,7 +129,6 @@ public void testResolveZeroAccess() throws Exception { public void testResolveSsoWithNoRoleAccess() throws Exception { final String username = randomAlphaOfLengthBetween(4, 12); - final String app = randomAlphaOfLengthBetween(3, 8); final String resource = "cluster:" + MessageDigests.toHexString(randomByteArrayOfLength(16)); final String viewerAction = "role:cluster:view"; final String adminAction = "role:cluster:admin"; @@ -118,7 +153,6 @@ public void testResolveSsoWithNoRoleAccess() throws Exception { public void testResolveSsoWithSingleRole() throws Exception { final String username = randomAlphaOfLengthBetween(4, 12); - final String app = randomAlphaOfLengthBetween(3, 8); final String resource = "cluster:" + MessageDigests.toHexString(randomByteArrayOfLength(16)); final String viewerAction = "role:cluster:view"; final String adminAction = "role:cluster:admin"; @@ -143,7 +177,6 @@ public void testResolveSsoWithSingleRole() throws Exception { public void testResolveSsoWithMultipleRoles() throws Exception { final String username = randomAlphaOfLengthBetween(4, 12); - final String app = randomAlphaOfLengthBetween(3, 8); final String resource = "cluster:" + MessageDigests.toHexString(randomByteArrayOfLength(16)); final String viewerAction = "role:cluster:view"; final String adminAction = "role:cluster:admin"; @@ -183,6 +216,35 @@ public void testResolveSsoWithMultipleRoles() throws Exception { }); } + public void testResolveSsoWithActionDefinedInUserPrivileges() throws Exception { + final String username = randomAlphaOfLengthBetween(4, 12); + final String resource = "cluster:" + MessageDigests.toHexString(randomByteArrayOfLength(16)); + final String actionInUserPrivs = "role:extra2"; + final String adminAction = "role:cluster:admin"; + + setupUser(username, () -> { + setupHasPrivileges(username, app, access(resource, actionInUserPrivs, true), access(resource, adminAction, false)); + + final PlainActionFuture future = new PlainActionFuture<>(); + final Function<String, Set<String>> roleMapping = Map.of( + actionInUserPrivs, + Set.of("extra2"), + adminAction, + Set.of("admin") + )::get; + resolver.resolve(service(app, resource, roleMapping), future); + final UserPrivilegeResolver.UserPrivileges privileges; + try { + privileges = future.get(); + } catch (Exception e) { + throw new RuntimeException(e); + } + 
assertThat(privileges.principal, equalTo(username)); + assertThat(privileges.hasAccess, equalTo(true)); + assertThat(privileges.roles, containsInAnyOrder("extra2")); + }); + } + private ServiceProviderPrivileges service(String appName, String resource, Function> roleMapping) { return new ServiceProviderPrivileges(appName, resource, roleMapping); } @@ -209,10 +271,24 @@ private HasPrivilegesResponse setupHasPrivileges( final Map> appPrivs = Map.of(appName, privileges); final HasPrivilegesResponse response = new HasPrivilegesResponse(username, isCompleteMatch, Map.of(), Set.of(), appPrivs); - Mockito.doAnswer(inv -> { + doAnswer(inv -> { final Object[] args = inv.getArguments(); assertThat(args.length, equalTo(3)); ActionListener listener = (ActionListener) args[args.length - 1]; + HasPrivilegesRequest request = (HasPrivilegesRequest) args[1]; + Set gotPriviliges = Arrays.stream(request.applicationPrivileges()) + .flatMap(appPriv -> Arrays.stream(appPriv.getPrivileges())) + .collect(Collectors.toUnmodifiableSet()); + Set expectedPrivileges = Set.of( + "role:cluster:view", + "role:cluster:admin", + "role:cluster:operator", + "role:cluster:monitor", + "role:extra1", + "role:extra2", + "role:extra3" + ); + assertEquals(expectedPrivileges, gotPriviliges); listener.onResponse(response); return null; }).when(client).execute(same(HasPrivilegesAction.INSTANCE), any(HasPrivilegesRequest.class), any(ActionListener.class)); From 8e3efae03df386dc80ab1a4ac3e620d16dc9828c Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Wed, 10 Jan 2024 20:57:13 -0800 Subject: [PATCH 29/75] ESQL: Introduce mode setting for ENRICH (#103949) Extend grammar to allow setting a mode for ENRICH to indicate the policy resolution when running against remote clusters/CCQ. --- docs/changelog/103949.yaml | 5 + .../org/elasticsearch/TransportVersions.java | 1 + .../esql/src/main/antlr/EsqlBaseLexer.g4 | 52 +- .../esql/src/main/antlr/EsqlBaseParser.g4 | 8 +- .../xpack/esql/analysis/Analyzer.java | 12 +- .../xpack/esql/io/stream/PlanNamedTypes.java | 9 + .../xpack/esql/parser/EsqlBaseLexer.interp | 29 +- .../xpack/esql/parser/EsqlBaseLexer.java | 1434 +++++++++-------- .../xpack/esql/parser/EsqlBaseParser.interp | 17 +- .../xpack/esql/parser/EsqlBaseParser.java | 1368 ++++++++-------- .../parser/EsqlBaseParserBaseListener.java | 12 + .../parser/EsqlBaseParserBaseVisitor.java | 7 + .../esql/parser/EsqlBaseParserListener.java | 10 + .../esql/parser/EsqlBaseParserVisitor.java | 6 + .../xpack/esql/parser/IdentifierBuilder.java | 4 +- .../xpack/esql/parser/LogicalPlanBuilder.java | 43 +- .../xpack/esql/plan/logical/Enrich.java | 40 +- .../esql/parser/StatementParserTests.java | 31 +- 18 files changed, 1728 insertions(+), 1360 deletions(-) create mode 100644 docs/changelog/103949.yaml diff --git a/docs/changelog/103949.yaml b/docs/changelog/103949.yaml new file mode 100644 index 0000000000000..96bd76d89ceae --- /dev/null +++ b/docs/changelog/103949.yaml @@ -0,0 +1,5 @@ +pr: 103949 +summary: "ESQL: Introduce mode setting for ENRICH" +area: ES|QL +type: feature +issues: [] diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index f289a7a3c89a1..5b2819f04ec24 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -182,6 +182,7 @@ static TransportVersion def(int id) { public static final TransportVersion ESQL_PLAN_POINT_LITERAL_WKB = def(8_570_00_0); public static final 
TransportVersion HOT_THREADS_AS_BYTES = def(8_571_00_0); public static final TransportVersion ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED = def(8_572_00_0); + public static final TransportVersion ESQL_ENRICH_POLICY_CCQ_MODE = def(8_573_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 index dbaefa2e5aebf..8f47a79cde0bc 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 @@ -177,8 +177,8 @@ EXPR_WS // mode FROM_MODE; FROM_PIPE : PIPE -> type(PIPE), popMode; -FROM_OPENING_BRACKET : OPENING_BRACKET -> type(OPENING_BRACKET), pushMode(FROM_MODE), pushMode(FROM_MODE); -FROM_CLOSING_BRACKET : CLOSING_BRACKET -> type(CLOSING_BRACKET), popMode, popMode; +FROM_OPENING_BRACKET : OPENING_BRACKET -> type(OPENING_BRACKET); +FROM_CLOSING_BRACKET : CLOSING_BRACKET -> type(CLOSING_BRACKET); FROM_COMMA : COMMA -> type(COMMA); FROM_ASSIGN : ASSIGN -> type(ASSIGN); @@ -220,11 +220,15 @@ fragment UNQUOTED_ID_BODY_WITH_PATTERN : (LETTER | DIGIT | UNDERSCORE | ASTERISK) ; -PROJECT_UNQUOTED_IDENTIFIER +UNQUOTED_ID_PATTERN : (LETTER | ASTERISK) UNQUOTED_ID_BODY_WITH_PATTERN* | (UNDERSCORE | ASPERAND) UNQUOTED_ID_BODY_WITH_PATTERN+ ; +PROJECT_UNQUOTED_IDENTIFIER + : UNQUOTED_ID_PATTERN -> type(UNQUOTED_ID_PATTERN) + ; + PROJECT_QUOTED_IDENTIFIER : QUOTED_IDENTIFIER -> type(QUOTED_IDENTIFIER) ; @@ -257,7 +261,7 @@ RENAME_QUOTED_IDENTIFIER // use the unquoted pattern to let the parser invalidate fields with * RENAME_UNQUOTED_IDENTIFIER - : PROJECT_UNQUOTED_IDENTIFIER -> type(PROJECT_UNQUOTED_IDENTIFIER) + : UNQUOTED_ID_PATTERN -> type(UNQUOTED_ID_PATTERN) ; RENAME_LINE_COMMENT @@ -275,19 +279,29 @@ RENAME_WS // | ENRICH ON key WITH fields mode ENRICH_MODE; ENRICH_PIPE : PIPE -> type(PIPE), popMode; +ENRICH_OPENING_BRACKET : OPENING_BRACKET -> type(OPENING_BRACKET), pushMode(SETTING_MODE); ON : 'on' -> pushMode(ENRICH_FIELD_MODE); WITH : 'with' -> pushMode(ENRICH_FIELD_MODE); -// use the unquoted pattern to let the parser invalidate fields with * -ENRICH_POLICY_UNQUOTED_IDENTIFIER - : FROM_UNQUOTED_IDENTIFIER -> type(FROM_UNQUOTED_IDENTIFIER) +// similar to that of an index +// see https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-create-index.html#indices-create-api-path-params +fragment ENRICH_POLICY_NAME_BODY + : ~[\\/?"<>| ,#\t\r\n:] + ; + +ENRICH_POLICY_NAME + : (LETTER | DIGIT) ENRICH_POLICY_NAME_BODY* ; ENRICH_QUOTED_IDENTIFIER : QUOTED_IDENTIFIER -> type(QUOTED_IDENTIFIER) ; +ENRICH_MODE_UNQUOTED_VALUE + : ENRICH_POLICY_NAME -> type(ENRICH_POLICY_NAME) + ; + ENRICH_LINE_COMMENT : LINE_COMMENT -> channel(HIDDEN) ; @@ -310,7 +324,7 @@ ENRICH_FIELD_DOT: DOT -> type(DOT); ENRICH_FIELD_WITH : WITH -> type(WITH) ; ENRICH_FIELD_UNQUOTED_IDENTIFIER - : PROJECT_UNQUOTED_IDENTIFIER -> type(PROJECT_UNQUOTED_IDENTIFIER) + : UNQUOTED_ID_PATTERN -> type(UNQUOTED_ID_PATTERN) ; ENRICH_FIELD_QUOTED_IDENTIFIER @@ -373,3 +387,25 @@ SHOW_MULTILINE_COMMENT SHOW_WS : WS -> channel(HIDDEN) ; + +mode SETTING_MODE; +SETTING_CLOSING_BRACKET : CLOSING_BRACKET -> type(CLOSING_BRACKET), popMode; + +COLON : ':'; + +SETTING + : (ASPERAND | DIGIT| DOT | LETTER | UNDERSCORE)+ + ; + +SETTING_LINE_COMMENT + : LINE_COMMENT -> channel(HIDDEN) + ; + +SETTTING_MULTILINE_COMMENT + : MULTILINE_COMMENT -> channel(HIDDEN) + ; + +SETTING_WS + : WS -> channel(HIDDEN) + ; + diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 
b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index cdf0cea58b230..fd269a779cfd5 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -141,7 +141,7 @@ identifier ; identifierPattern - : PROJECT_UNQUOTED_IDENTIFIER + : UNQUOTED_ID_PATTERN | QUOTED_IDENTIFIER ; @@ -246,9 +246,13 @@ showCommand ; enrichCommand - : ENRICH policyName=fromIdentifier (ON matchField=qualifiedNamePattern)? (WITH enrichWithClause (COMMA enrichWithClause)*)? + : ENRICH setting* policyName=ENRICH_POLICY_NAME (ON matchField=qualifiedNamePattern)? (WITH enrichWithClause (COMMA enrichWithClause)*)? ; enrichWithClause : (newName=qualifiedNamePattern ASSIGN)? enrichField=qualifiedNamePattern ; + +setting + : OPENING_BRACKET name=SETTING COLON value=SETTING CLOSING_BRACKET + ; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index 674a32db1f0fb..681027392c358 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -243,7 +243,7 @@ protected LogicalPlan rule(Enrich plan, AnalyzerContext context) { policy ); - return new Enrich(plan.source(), plan.child(), policyNameExp, matchField, policyRes, enrichFields); + return new Enrich(plan.source(), plan.child(), plan.mode(), policyNameExp, matchField, policyRes, enrichFields); } private String unresolvedPolicyError(String policyName, EnrichResolution enrichResolution) { @@ -584,7 +584,15 @@ private LogicalPlan resolveEnrich(Enrich enrich, List childrenOutput) "Unsupported type [" + resolved.dataType() + "] for enrich matching field [" + ua.name() + "]; only KEYWORD allowed" ); } - return new Enrich(enrich.source(), enrich.child(), enrich.policyName(), resolved, enrich.policy(), enrich.enrichFields()); + return new Enrich( + enrich.source(), + enrich.child(), + enrich.mode(), + enrich.policyName(), + resolved, + enrich.policy(), + enrich.enrichFields() + ); } return enrich; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index e655a60825f3a..6031d1b06ebfd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -721,9 +721,14 @@ static void writeEval(PlanStreamOutput out, Eval eval) throws IOException { } static Enrich readEnrich(PlanStreamInput in) throws IOException { + Enrich.Mode m = Enrich.Mode.ANY; + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_POLICY_CCQ_MODE)) { + m = in.readEnum(Enrich.Mode.class); + } return new Enrich( in.readSource(), in.readLogicalPlanNode(), + m, in.readExpression(), in.readNamedExpression(), new EnrichPolicyResolution(in.readString(), new EnrichPolicy(in), IndexResolution.valid(readEsIndex(in))), @@ -732,6 +737,10 @@ static Enrich readEnrich(PlanStreamInput in) throws IOException { } static void writeEnrich(PlanStreamOutput out, Enrich enrich) throws IOException { + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_POLICY_CCQ_MODE)) { + out.writeEnum(enrich.mode()); + } + out.writeNoSource(); out.writeLogicalPlanNode(enrich.child()); 
out.writeExpression(enrich.policyName()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp index 585f722065e6f..8810debacc13a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp @@ -93,11 +93,17 @@ null null null null +null 'info' 'functions' null null null +':' +null +null +null +null token symbolic names: null @@ -175,7 +181,7 @@ FROM_UNQUOTED_IDENTIFIER FROM_LINE_COMMENT FROM_MULTILINE_COMMENT FROM_WS -PROJECT_UNQUOTED_IDENTIFIER +UNQUOTED_ID_PATTERN PROJECT_LINE_COMMENT PROJECT_MULTILINE_COMMENT PROJECT_WS @@ -185,6 +191,7 @@ RENAME_MULTILINE_COMMENT RENAME_WS ON WITH +ENRICH_POLICY_NAME ENRICH_LINE_COMMENT ENRICH_MULTILINE_COMMENT ENRICH_WS @@ -199,6 +206,11 @@ FUNCTIONS SHOW_LINE_COMMENT SHOW_MULTILINE_COMMENT SHOW_WS +COLON +SETTING +SETTING_LINE_COMMENT +SETTTING_MULTILINE_COMMENT +SETTING_WS rule names: DISSECT @@ -298,6 +310,7 @@ PROJECT_PIPE PROJECT_DOT PROJECT_COMMA UNQUOTED_ID_BODY_WITH_PATTERN +UNQUOTED_ID_PATTERN PROJECT_UNQUOTED_IDENTIFIER PROJECT_QUOTED_IDENTIFIER PROJECT_LINE_COMMENT @@ -314,10 +327,13 @@ RENAME_LINE_COMMENT RENAME_MULTILINE_COMMENT RENAME_WS ENRICH_PIPE +ENRICH_OPENING_BRACKET ON WITH -ENRICH_POLICY_UNQUOTED_IDENTIFIER +ENRICH_POLICY_NAME_BODY +ENRICH_POLICY_NAME ENRICH_QUOTED_IDENTIFIER +ENRICH_MODE_UNQUOTED_VALUE ENRICH_LINE_COMMENT ENRICH_MULTILINE_COMMENT ENRICH_WS @@ -344,6 +360,12 @@ FUNCTIONS SHOW_LINE_COMMENT SHOW_MULTILINE_COMMENT SHOW_WS +SETTING_CLOSING_BRACKET +COLON +SETTING +SETTING_LINE_COMMENT +SETTTING_MULTILINE_COMMENT +SETTING_WS channel names: DEFAULT_TOKEN_CHANNEL @@ -360,6 +382,7 @@ ENRICH_MODE ENRICH_FIELD_MODE MVEXPAND_MODE SHOW_MODE +SETTING_MODE atn: -[4, 0, 98, 1090, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 
109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 4, 18, 451, 8, 18, 11, 18, 12, 18, 452, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 461, 8, 19, 10, 19, 12, 19, 464, 9, 19, 1, 19, 3, 19, 467, 8, 19, 1, 19, 3, 19, 470, 8, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 479, 8, 20, 10, 20, 12, 20, 482, 9, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 4, 21, 490, 8, 21, 11, 21, 12, 21, 491, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 1, 32, 3, 32, 533, 8, 32, 1, 32, 4, 32, 536, 8, 32, 11, 32, 12, 32, 537, 1, 33, 1, 33, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 3, 35, 547, 8, 35, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 3, 37, 554, 8, 37, 1, 38, 1, 38, 1, 38, 5, 38, 559, 8, 38, 10, 38, 12, 38, 562, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 570, 8, 38, 10, 38, 12, 38, 573, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 3, 38, 580, 8, 38, 1, 38, 3, 38, 583, 8, 38, 3, 38, 585, 8, 38, 1, 39, 4, 39, 588, 8, 39, 11, 39, 12, 39, 589, 1, 40, 4, 40, 593, 8, 40, 11, 40, 12, 40, 594, 1, 40, 1, 40, 5, 40, 599, 8, 40, 10, 40, 12, 40, 602, 9, 40, 1, 40, 1, 40, 4, 40, 606, 8, 40, 11, 40, 12, 40, 607, 1, 40, 4, 40, 611, 8, 40, 11, 40, 12, 40, 612, 1, 40, 1, 40, 5, 40, 617, 8, 40, 10, 40, 12, 40, 620, 9, 40, 3, 40, 622, 8, 40, 1, 40, 1, 40, 1, 40, 1, 40, 4, 40, 628, 8, 40, 11, 40, 12, 40, 629, 1, 40, 1, 40, 3, 40, 634, 8, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 60, 1, 
60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 76, 1, 76, 5, 76, 759, 8, 76, 10, 76, 12, 76, 762, 9, 76, 1, 76, 1, 76, 3, 76, 766, 8, 76, 1, 76, 4, 76, 769, 8, 76, 11, 76, 12, 76, 770, 3, 76, 773, 8, 76, 1, 77, 1, 77, 4, 77, 777, 8, 77, 11, 77, 12, 77, 778, 1, 77, 1, 77, 1, 78, 1, 78, 1, 78, 1, 78, 1, 79, 1, 79, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 86, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 3, 87, 832, 8, 87, 1, 88, 4, 88, 835, 8, 88, 11, 88, 12, 88, 836, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 3, 96, 872, 8, 96, 1, 97, 1, 97, 3, 97, 876, 8, 97, 1, 97, 5, 97, 879, 8, 97, 10, 97, 12, 97, 882, 9, 97, 1, 97, 1, 97, 3, 97, 886, 8, 97, 1, 97, 4, 97, 889, 8, 97, 11, 97, 12, 97, 890, 3, 97, 893, 8, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 2, 480, 571, 0, 143, 10, 1, 12, 2, 14, 3, 16, 4, 18, 5, 20, 6, 22, 7, 24, 8, 26, 9, 28, 10, 30, 11, 32, 12, 34, 13, 36, 14, 38, 15, 40, 16, 42, 17, 44, 18, 46, 19, 48, 20, 50, 21, 52, 22, 54, 0, 56, 0, 58, 23, 60, 24, 62, 25, 64, 26, 66, 0, 68, 0, 70, 0, 72, 0, 74, 0, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 27, 88, 28, 90, 29, 92, 30, 94, 31, 96, 32, 98, 33, 100, 34, 102, 35, 104, 36, 106, 37, 108, 38, 110, 39, 112, 40, 114, 41, 116, 42, 118, 43, 120, 44, 122, 45, 124, 46, 126, 47, 128, 48, 130, 49, 132, 50, 134, 51, 136, 52, 138, 53, 140, 54, 142, 55, 144, 56, 146, 57, 148, 58, 
150, 59, 152, 60, 154, 61, 156, 62, 158, 63, 160, 64, 162, 65, 164, 66, 166, 67, 168, 68, 170, 69, 172, 0, 174, 0, 176, 0, 178, 0, 180, 0, 182, 70, 184, 0, 186, 71, 188, 0, 190, 72, 192, 73, 194, 74, 196, 0, 198, 0, 200, 0, 202, 0, 204, 75, 206, 0, 208, 76, 210, 77, 212, 78, 214, 0, 216, 0, 218, 0, 220, 0, 222, 79, 224, 0, 226, 0, 228, 80, 230, 81, 232, 82, 234, 0, 236, 83, 238, 84, 240, 0, 242, 0, 244, 85, 246, 86, 248, 87, 250, 0, 252, 0, 254, 0, 256, 0, 258, 0, 260, 0, 262, 0, 264, 88, 266, 89, 268, 90, 270, 0, 272, 0, 274, 0, 276, 0, 278, 91, 280, 92, 282, 93, 284, 0, 286, 94, 288, 95, 290, 96, 292, 97, 294, 98, 10, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 12, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 5, 0, 34, 34, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 10, 0, 9, 10, 13, 13, 32, 32, 44, 44, 47, 47, 61, 61, 91, 91, 93, 93, 96, 96, 124, 124, 2, 0, 42, 42, 47, 47, 1112, 0, 10, 1, 0, 0, 0, 0, 12, 1, 0, 0, 0, 0, 14, 1, 0, 0, 0, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 1, 54, 1, 0, 0, 0, 1, 56, 1, 0, 0, 0, 1, 58, 1, 0, 0, 0, 1, 60, 1, 0, 0, 0, 1, 62, 1, 0, 0, 0, 2, 64, 1, 0, 0, 0, 2, 86, 1, 0, 0, 0, 2, 88, 1, 0, 0, 0, 2, 90, 1, 0, 0, 0, 2, 92, 1, 0, 0, 0, 2, 94, 1, 0, 0, 0, 2, 96, 1, 0, 0, 0, 2, 98, 1, 0, 0, 0, 2, 100, 1, 0, 0, 0, 2, 102, 1, 0, 0, 0, 2, 104, 1, 0, 0, 0, 2, 106, 1, 0, 0, 0, 2, 108, 1, 0, 0, 0, 2, 110, 1, 0, 0, 0, 2, 112, 1, 0, 0, 0, 2, 114, 1, 0, 0, 0, 2, 116, 1, 0, 0, 0, 2, 118, 1, 0, 0, 0, 2, 120, 1, 0, 0, 0, 2, 122, 1, 0, 0, 0, 2, 124, 1, 0, 0, 0, 2, 126, 1, 0, 0, 0, 2, 128, 1, 0, 0, 0, 2, 130, 1, 0, 0, 0, 2, 132, 1, 0, 0, 0, 2, 134, 1, 0, 0, 0, 2, 136, 1, 0, 0, 0, 2, 138, 1, 0, 0, 0, 2, 140, 1, 0, 0, 0, 2, 142, 1, 0, 0, 0, 2, 144, 1, 0, 0, 0, 2, 146, 1, 0, 0, 0, 2, 148, 1, 0, 0, 0, 2, 150, 1, 0, 0, 0, 2, 152, 1, 0, 0, 0, 2, 154, 1, 0, 0, 0, 2, 156, 1, 0, 0, 0, 2, 158, 1, 0, 0, 0, 2, 160, 1, 0, 0, 0, 2, 162, 1, 0, 0, 0, 2, 164, 1, 0, 0, 0, 2, 166, 1, 0, 0, 0, 2, 168, 1, 0, 0, 0, 2, 170, 1, 0, 0, 0, 3, 172, 1, 0, 0, 0, 3, 174, 1, 0, 0, 0, 3, 176, 1, 0, 0, 0, 3, 178, 1, 0, 0, 0, 3, 180, 1, 0, 0, 0, 3, 182, 1, 0, 0, 0, 3, 186, 1, 0, 0, 0, 3, 188, 1, 0, 0, 0, 3, 190, 1, 0, 0, 0, 3, 192, 1, 0, 0, 0, 3, 194, 1, 0, 0, 0, 4, 196, 1, 0, 0, 0, 4, 198, 1, 0, 0, 0, 4, 200, 1, 0, 0, 0, 4, 204, 1, 0, 0, 0, 4, 206, 1, 0, 0, 0, 4, 208, 1, 0, 0, 0, 4, 210, 1, 0, 0, 0, 4, 212, 1, 0, 0, 0, 5, 214, 1, 0, 0, 0, 5, 216, 1, 0, 0, 0, 5, 218, 1, 0, 0, 0, 5, 220, 1, 0, 0, 0, 5, 222, 1, 0, 0, 0, 5, 224, 1, 0, 0, 0, 5, 226, 1, 0, 0, 0, 5, 228, 1, 0, 0, 0, 5, 230, 1, 0, 0, 0, 5, 232, 1, 0, 0, 0, 6, 234, 1, 0, 0, 0, 6, 236, 1, 0, 0, 0, 6, 238, 1, 0, 0, 0, 6, 240, 1, 0, 0, 0, 6, 242, 1, 0, 0, 0, 6, 244, 1, 0, 0, 0, 6, 246, 1, 0, 0, 0, 6, 248, 1, 0, 0, 0, 7, 250, 1, 0, 0, 0, 7, 252, 1, 0, 0, 0, 7, 254, 1, 0, 0, 0, 7, 256, 1, 0, 0, 0, 7, 258, 1, 0, 0, 0, 7, 260, 1, 0, 0, 0, 7, 262, 1, 0, 0, 0, 7, 264, 1, 0, 0, 0, 7, 266, 1, 0, 0, 0, 7, 268, 1, 0, 0, 0, 8, 270, 1, 0, 0, 0, 8, 272, 1, 0, 0, 0, 8, 274, 1, 0, 0, 0, 8, 276, 1, 0, 0, 0, 8, 278, 1, 0, 0, 0, 8, 280, 1, 0, 0, 0, 8, 282, 1, 0, 0, 0, 9, 284, 1, 0, 0, 0, 
9, 286, 1, 0, 0, 0, 9, 288, 1, 0, 0, 0, 9, 290, 1, 0, 0, 0, 9, 292, 1, 0, 0, 0, 9, 294, 1, 0, 0, 0, 10, 296, 1, 0, 0, 0, 12, 306, 1, 0, 0, 0, 14, 313, 1, 0, 0, 0, 16, 322, 1, 0, 0, 0, 18, 329, 1, 0, 0, 0, 20, 339, 1, 0, 0, 0, 22, 346, 1, 0, 0, 0, 24, 353, 1, 0, 0, 0, 26, 367, 1, 0, 0, 0, 28, 374, 1, 0, 0, 0, 30, 382, 1, 0, 0, 0, 32, 394, 1, 0, 0, 0, 34, 404, 1, 0, 0, 0, 36, 413, 1, 0, 0, 0, 38, 419, 1, 0, 0, 0, 40, 426, 1, 0, 0, 0, 42, 433, 1, 0, 0, 0, 44, 441, 1, 0, 0, 0, 46, 450, 1, 0, 0, 0, 48, 456, 1, 0, 0, 0, 50, 473, 1, 0, 0, 0, 52, 489, 1, 0, 0, 0, 54, 495, 1, 0, 0, 0, 56, 500, 1, 0, 0, 0, 58, 505, 1, 0, 0, 0, 60, 509, 1, 0, 0, 0, 62, 513, 1, 0, 0, 0, 64, 517, 1, 0, 0, 0, 66, 521, 1, 0, 0, 0, 68, 523, 1, 0, 0, 0, 70, 525, 1, 0, 0, 0, 72, 528, 1, 0, 0, 0, 74, 530, 1, 0, 0, 0, 76, 539, 1, 0, 0, 0, 78, 541, 1, 0, 0, 0, 80, 546, 1, 0, 0, 0, 82, 548, 1, 0, 0, 0, 84, 553, 1, 0, 0, 0, 86, 584, 1, 0, 0, 0, 88, 587, 1, 0, 0, 0, 90, 633, 1, 0, 0, 0, 92, 635, 1, 0, 0, 0, 94, 638, 1, 0, 0, 0, 96, 642, 1, 0, 0, 0, 98, 646, 1, 0, 0, 0, 100, 648, 1, 0, 0, 0, 102, 650, 1, 0, 0, 0, 104, 655, 1, 0, 0, 0, 106, 657, 1, 0, 0, 0, 108, 663, 1, 0, 0, 0, 110, 669, 1, 0, 0, 0, 112, 674, 1, 0, 0, 0, 114, 676, 1, 0, 0, 0, 116, 679, 1, 0, 0, 0, 118, 682, 1, 0, 0, 0, 120, 687, 1, 0, 0, 0, 122, 691, 1, 0, 0, 0, 124, 696, 1, 0, 0, 0, 126, 702, 1, 0, 0, 0, 128, 705, 1, 0, 0, 0, 130, 707, 1, 0, 0, 0, 132, 713, 1, 0, 0, 0, 134, 715, 1, 0, 0, 0, 136, 720, 1, 0, 0, 0, 138, 723, 1, 0, 0, 0, 140, 726, 1, 0, 0, 0, 142, 728, 1, 0, 0, 0, 144, 731, 1, 0, 0, 0, 146, 733, 1, 0, 0, 0, 148, 736, 1, 0, 0, 0, 150, 738, 1, 0, 0, 0, 152, 740, 1, 0, 0, 0, 154, 742, 1, 0, 0, 0, 156, 744, 1, 0, 0, 0, 158, 746, 1, 0, 0, 0, 160, 751, 1, 0, 0, 0, 162, 772, 1, 0, 0, 0, 164, 774, 1, 0, 0, 0, 166, 782, 1, 0, 0, 0, 168, 786, 1, 0, 0, 0, 170, 790, 1, 0, 0, 0, 172, 794, 1, 0, 0, 0, 174, 799, 1, 0, 0, 0, 176, 805, 1, 0, 0, 0, 178, 811, 1, 0, 0, 0, 180, 815, 1, 0, 0, 0, 182, 819, 1, 0, 0, 0, 184, 831, 1, 0, 0, 0, 186, 834, 1, 0, 0, 0, 188, 838, 1, 0, 0, 0, 190, 842, 1, 0, 0, 0, 192, 846, 1, 0, 0, 0, 194, 850, 1, 0, 0, 0, 196, 854, 1, 0, 0, 0, 198, 859, 1, 0, 0, 0, 200, 863, 1, 0, 0, 0, 202, 871, 1, 0, 0, 0, 204, 892, 1, 0, 0, 0, 206, 894, 1, 0, 0, 0, 208, 898, 1, 0, 0, 0, 210, 902, 1, 0, 0, 0, 212, 906, 1, 0, 0, 0, 214, 910, 1, 0, 0, 0, 216, 915, 1, 0, 0, 0, 218, 919, 1, 0, 0, 0, 220, 923, 1, 0, 0, 0, 222, 927, 1, 0, 0, 0, 224, 930, 1, 0, 0, 0, 226, 934, 1, 0, 0, 0, 228, 938, 1, 0, 0, 0, 230, 942, 1, 0, 0, 0, 232, 946, 1, 0, 0, 0, 234, 950, 1, 0, 0, 0, 236, 955, 1, 0, 0, 0, 238, 960, 1, 0, 0, 0, 240, 967, 1, 0, 0, 0, 242, 971, 1, 0, 0, 0, 244, 975, 1, 0, 0, 0, 246, 979, 1, 0, 0, 0, 248, 983, 1, 0, 0, 0, 250, 987, 1, 0, 0, 0, 252, 993, 1, 0, 0, 0, 254, 997, 1, 0, 0, 0, 256, 1001, 1, 0, 0, 0, 258, 1005, 1, 0, 0, 0, 260, 1009, 1, 0, 0, 0, 262, 1013, 1, 0, 0, 0, 264, 1017, 1, 0, 0, 0, 266, 1021, 1, 0, 0, 0, 268, 1025, 1, 0, 0, 0, 270, 1029, 1, 0, 0, 0, 272, 1034, 1, 0, 0, 0, 274, 1038, 1, 0, 0, 0, 276, 1042, 1, 0, 0, 0, 278, 1046, 1, 0, 0, 0, 280, 1050, 1, 0, 0, 0, 282, 1054, 1, 0, 0, 0, 284, 1058, 1, 0, 0, 0, 286, 1063, 1, 0, 0, 0, 288, 1068, 1, 0, 0, 0, 290, 1078, 1, 0, 0, 0, 292, 1082, 1, 0, 0, 0, 294, 1086, 1, 0, 0, 0, 296, 297, 5, 100, 0, 0, 297, 298, 5, 105, 0, 0, 298, 299, 5, 115, 0, 0, 299, 300, 5, 115, 0, 0, 300, 301, 5, 101, 0, 0, 301, 302, 5, 99, 0, 0, 302, 303, 5, 116, 0, 0, 303, 304, 1, 0, 0, 0, 304, 305, 6, 0, 0, 0, 305, 11, 1, 0, 0, 0, 306, 307, 5, 100, 0, 0, 307, 308, 5, 114, 0, 0, 308, 309, 5, 111, 0, 0, 309, 310, 5, 112, 0, 0, 310, 
311, 1, 0, 0, 0, 311, 312, 6, 1, 1, 0, 312, 13, 1, 0, 0, 0, 313, 314, 5, 101, 0, 0, 314, 315, 5, 110, 0, 0, 315, 316, 5, 114, 0, 0, 316, 317, 5, 105, 0, 0, 317, 318, 5, 99, 0, 0, 318, 319, 5, 104, 0, 0, 319, 320, 1, 0, 0, 0, 320, 321, 6, 2, 2, 0, 321, 15, 1, 0, 0, 0, 322, 323, 5, 101, 0, 0, 323, 324, 5, 118, 0, 0, 324, 325, 5, 97, 0, 0, 325, 326, 5, 108, 0, 0, 326, 327, 1, 0, 0, 0, 327, 328, 6, 3, 0, 0, 328, 17, 1, 0, 0, 0, 329, 330, 5, 101, 0, 0, 330, 331, 5, 120, 0, 0, 331, 332, 5, 112, 0, 0, 332, 333, 5, 108, 0, 0, 333, 334, 5, 97, 0, 0, 334, 335, 5, 105, 0, 0, 335, 336, 5, 110, 0, 0, 336, 337, 1, 0, 0, 0, 337, 338, 6, 4, 3, 0, 338, 19, 1, 0, 0, 0, 339, 340, 5, 102, 0, 0, 340, 341, 5, 114, 0, 0, 341, 342, 5, 111, 0, 0, 342, 343, 5, 109, 0, 0, 343, 344, 1, 0, 0, 0, 344, 345, 6, 5, 4, 0, 345, 21, 1, 0, 0, 0, 346, 347, 5, 103, 0, 0, 347, 348, 5, 114, 0, 0, 348, 349, 5, 111, 0, 0, 349, 350, 5, 107, 0, 0, 350, 351, 1, 0, 0, 0, 351, 352, 6, 6, 0, 0, 352, 23, 1, 0, 0, 0, 353, 354, 5, 105, 0, 0, 354, 355, 5, 110, 0, 0, 355, 356, 5, 108, 0, 0, 356, 357, 5, 105, 0, 0, 357, 358, 5, 110, 0, 0, 358, 359, 5, 101, 0, 0, 359, 360, 5, 115, 0, 0, 360, 361, 5, 116, 0, 0, 361, 362, 5, 97, 0, 0, 362, 363, 5, 116, 0, 0, 363, 364, 5, 115, 0, 0, 364, 365, 1, 0, 0, 0, 365, 366, 6, 7, 0, 0, 366, 25, 1, 0, 0, 0, 367, 368, 5, 107, 0, 0, 368, 369, 5, 101, 0, 0, 369, 370, 5, 101, 0, 0, 370, 371, 5, 112, 0, 0, 371, 372, 1, 0, 0, 0, 372, 373, 6, 8, 1, 0, 373, 27, 1, 0, 0, 0, 374, 375, 5, 108, 0, 0, 375, 376, 5, 105, 0, 0, 376, 377, 5, 109, 0, 0, 377, 378, 5, 105, 0, 0, 378, 379, 5, 116, 0, 0, 379, 380, 1, 0, 0, 0, 380, 381, 6, 9, 0, 0, 381, 29, 1, 0, 0, 0, 382, 383, 5, 109, 0, 0, 383, 384, 5, 118, 0, 0, 384, 385, 5, 95, 0, 0, 385, 386, 5, 101, 0, 0, 386, 387, 5, 120, 0, 0, 387, 388, 5, 112, 0, 0, 388, 389, 5, 97, 0, 0, 389, 390, 5, 110, 0, 0, 390, 391, 5, 100, 0, 0, 391, 392, 1, 0, 0, 0, 392, 393, 6, 10, 5, 0, 393, 31, 1, 0, 0, 0, 394, 395, 5, 112, 0, 0, 395, 396, 5, 114, 0, 0, 396, 397, 5, 111, 0, 0, 397, 398, 5, 106, 0, 0, 398, 399, 5, 101, 0, 0, 399, 400, 5, 99, 0, 0, 400, 401, 5, 116, 0, 0, 401, 402, 1, 0, 0, 0, 402, 403, 6, 11, 1, 0, 403, 33, 1, 0, 0, 0, 404, 405, 5, 114, 0, 0, 405, 406, 5, 101, 0, 0, 406, 407, 5, 110, 0, 0, 407, 408, 5, 97, 0, 0, 408, 409, 5, 109, 0, 0, 409, 410, 5, 101, 0, 0, 410, 411, 1, 0, 0, 0, 411, 412, 6, 12, 6, 0, 412, 35, 1, 0, 0, 0, 413, 414, 5, 114, 0, 0, 414, 415, 5, 111, 0, 0, 415, 416, 5, 119, 0, 0, 416, 417, 1, 0, 0, 0, 417, 418, 6, 13, 0, 0, 418, 37, 1, 0, 0, 0, 419, 420, 5, 115, 0, 0, 420, 421, 5, 104, 0, 0, 421, 422, 5, 111, 0, 0, 422, 423, 5, 119, 0, 0, 423, 424, 1, 0, 0, 0, 424, 425, 6, 14, 7, 0, 425, 39, 1, 0, 0, 0, 426, 427, 5, 115, 0, 0, 427, 428, 5, 111, 0, 0, 428, 429, 5, 114, 0, 0, 429, 430, 5, 116, 0, 0, 430, 431, 1, 0, 0, 0, 431, 432, 6, 15, 0, 0, 432, 41, 1, 0, 0, 0, 433, 434, 5, 115, 0, 0, 434, 435, 5, 116, 0, 0, 435, 436, 5, 97, 0, 0, 436, 437, 5, 116, 0, 0, 437, 438, 5, 115, 0, 0, 438, 439, 1, 0, 0, 0, 439, 440, 6, 16, 0, 0, 440, 43, 1, 0, 0, 0, 441, 442, 5, 119, 0, 0, 442, 443, 5, 104, 0, 0, 443, 444, 5, 101, 0, 0, 444, 445, 5, 114, 0, 0, 445, 446, 5, 101, 0, 0, 446, 447, 1, 0, 0, 0, 447, 448, 6, 17, 0, 0, 448, 45, 1, 0, 0, 0, 449, 451, 8, 0, 0, 0, 450, 449, 1, 0, 0, 0, 451, 452, 1, 0, 0, 0, 452, 450, 1, 0, 0, 0, 452, 453, 1, 0, 0, 0, 453, 454, 1, 0, 0, 0, 454, 455, 6, 18, 0, 0, 455, 47, 1, 0, 0, 0, 456, 457, 5, 47, 0, 0, 457, 458, 5, 47, 0, 0, 458, 462, 1, 0, 0, 0, 459, 461, 8, 1, 0, 0, 460, 459, 1, 0, 0, 0, 461, 464, 1, 0, 0, 0, 462, 460, 1, 0, 0, 0, 462, 463, 
1, 0, 0, 0, 463, 466, 1, 0, 0, 0, 464, 462, 1, 0, 0, 0, 465, 467, 5, 13, 0, 0, 466, 465, 1, 0, 0, 0, 466, 467, 1, 0, 0, 0, 467, 469, 1, 0, 0, 0, 468, 470, 5, 10, 0, 0, 469, 468, 1, 0, 0, 0, 469, 470, 1, 0, 0, 0, 470, 471, 1, 0, 0, 0, 471, 472, 6, 19, 8, 0, 472, 49, 1, 0, 0, 0, 473, 474, 5, 47, 0, 0, 474, 475, 5, 42, 0, 0, 475, 480, 1, 0, 0, 0, 476, 479, 3, 50, 20, 0, 477, 479, 9, 0, 0, 0, 478, 476, 1, 0, 0, 0, 478, 477, 1, 0, 0, 0, 479, 482, 1, 0, 0, 0, 480, 481, 1, 0, 0, 0, 480, 478, 1, 0, 0, 0, 481, 483, 1, 0, 0, 0, 482, 480, 1, 0, 0, 0, 483, 484, 5, 42, 0, 0, 484, 485, 5, 47, 0, 0, 485, 486, 1, 0, 0, 0, 486, 487, 6, 20, 8, 0, 487, 51, 1, 0, 0, 0, 488, 490, 7, 2, 0, 0, 489, 488, 1, 0, 0, 0, 490, 491, 1, 0, 0, 0, 491, 489, 1, 0, 0, 0, 491, 492, 1, 0, 0, 0, 492, 493, 1, 0, 0, 0, 493, 494, 6, 21, 8, 0, 494, 53, 1, 0, 0, 0, 495, 496, 3, 158, 74, 0, 496, 497, 1, 0, 0, 0, 497, 498, 6, 22, 9, 0, 498, 499, 6, 22, 10, 0, 499, 55, 1, 0, 0, 0, 500, 501, 3, 64, 27, 0, 501, 502, 1, 0, 0, 0, 502, 503, 6, 23, 11, 0, 503, 504, 6, 23, 12, 0, 504, 57, 1, 0, 0, 0, 505, 506, 3, 52, 21, 0, 506, 507, 1, 0, 0, 0, 507, 508, 6, 24, 8, 0, 508, 59, 1, 0, 0, 0, 509, 510, 3, 48, 19, 0, 510, 511, 1, 0, 0, 0, 511, 512, 6, 25, 8, 0, 512, 61, 1, 0, 0, 0, 513, 514, 3, 50, 20, 0, 514, 515, 1, 0, 0, 0, 515, 516, 6, 26, 8, 0, 516, 63, 1, 0, 0, 0, 517, 518, 5, 124, 0, 0, 518, 519, 1, 0, 0, 0, 519, 520, 6, 27, 12, 0, 520, 65, 1, 0, 0, 0, 521, 522, 7, 3, 0, 0, 522, 67, 1, 0, 0, 0, 523, 524, 7, 4, 0, 0, 524, 69, 1, 0, 0, 0, 525, 526, 5, 92, 0, 0, 526, 527, 7, 5, 0, 0, 527, 71, 1, 0, 0, 0, 528, 529, 8, 6, 0, 0, 529, 73, 1, 0, 0, 0, 530, 532, 7, 7, 0, 0, 531, 533, 7, 8, 0, 0, 532, 531, 1, 0, 0, 0, 532, 533, 1, 0, 0, 0, 533, 535, 1, 0, 0, 0, 534, 536, 3, 66, 28, 0, 535, 534, 1, 0, 0, 0, 536, 537, 1, 0, 0, 0, 537, 535, 1, 0, 0, 0, 537, 538, 1, 0, 0, 0, 538, 75, 1, 0, 0, 0, 539, 540, 5, 64, 0, 0, 540, 77, 1, 0, 0, 0, 541, 542, 5, 96, 0, 0, 542, 79, 1, 0, 0, 0, 543, 547, 8, 9, 0, 0, 544, 545, 5, 96, 0, 0, 545, 547, 5, 96, 0, 0, 546, 543, 1, 0, 0, 0, 546, 544, 1, 0, 0, 0, 547, 81, 1, 0, 0, 0, 548, 549, 5, 95, 0, 0, 549, 83, 1, 0, 0, 0, 550, 554, 3, 68, 29, 0, 551, 554, 3, 66, 28, 0, 552, 554, 3, 82, 36, 0, 553, 550, 1, 0, 0, 0, 553, 551, 1, 0, 0, 0, 553, 552, 1, 0, 0, 0, 554, 85, 1, 0, 0, 0, 555, 560, 5, 34, 0, 0, 556, 559, 3, 70, 30, 0, 557, 559, 3, 72, 31, 0, 558, 556, 1, 0, 0, 0, 558, 557, 1, 0, 0, 0, 559, 562, 1, 0, 0, 0, 560, 558, 1, 0, 0, 0, 560, 561, 1, 0, 0, 0, 561, 563, 1, 0, 0, 0, 562, 560, 1, 0, 0, 0, 563, 585, 5, 34, 0, 0, 564, 565, 5, 34, 0, 0, 565, 566, 5, 34, 0, 0, 566, 567, 5, 34, 0, 0, 567, 571, 1, 0, 0, 0, 568, 570, 8, 1, 0, 0, 569, 568, 1, 0, 0, 0, 570, 573, 1, 0, 0, 0, 571, 572, 1, 0, 0, 0, 571, 569, 1, 0, 0, 0, 572, 574, 1, 0, 0, 0, 573, 571, 1, 0, 0, 0, 574, 575, 5, 34, 0, 0, 575, 576, 5, 34, 0, 0, 576, 577, 5, 34, 0, 0, 577, 579, 1, 0, 0, 0, 578, 580, 5, 34, 0, 0, 579, 578, 1, 0, 0, 0, 579, 580, 1, 0, 0, 0, 580, 582, 1, 0, 0, 0, 581, 583, 5, 34, 0, 0, 582, 581, 1, 0, 0, 0, 582, 583, 1, 0, 0, 0, 583, 585, 1, 0, 0, 0, 584, 555, 1, 0, 0, 0, 584, 564, 1, 0, 0, 0, 585, 87, 1, 0, 0, 0, 586, 588, 3, 66, 28, 0, 587, 586, 1, 0, 0, 0, 588, 589, 1, 0, 0, 0, 589, 587, 1, 0, 0, 0, 589, 590, 1, 0, 0, 0, 590, 89, 1, 0, 0, 0, 591, 593, 3, 66, 28, 0, 592, 591, 1, 0, 0, 0, 593, 594, 1, 0, 0, 0, 594, 592, 1, 0, 0, 0, 594, 595, 1, 0, 0, 0, 595, 596, 1, 0, 0, 0, 596, 600, 3, 104, 47, 0, 597, 599, 3, 66, 28, 0, 598, 597, 1, 0, 0, 0, 599, 602, 1, 0, 0, 0, 600, 598, 1, 0, 0, 0, 600, 601, 1, 0, 0, 0, 601, 634, 1, 0, 0, 0, 602, 600, 1, 
0, 0, 0, 603, 605, 3, 104, 47, 0, 604, 606, 3, 66, 28, 0, 605, 604, 1, 0, 0, 0, 606, 607, 1, 0, 0, 0, 607, 605, 1, 0, 0, 0, 607, 608, 1, 0, 0, 0, 608, 634, 1, 0, 0, 0, 609, 611, 3, 66, 28, 0, 610, 609, 1, 0, 0, 0, 611, 612, 1, 0, 0, 0, 612, 610, 1, 0, 0, 0, 612, 613, 1, 0, 0, 0, 613, 621, 1, 0, 0, 0, 614, 618, 3, 104, 47, 0, 615, 617, 3, 66, 28, 0, 616, 615, 1, 0, 0, 0, 617, 620, 1, 0, 0, 0, 618, 616, 1, 0, 0, 0, 618, 619, 1, 0, 0, 0, 619, 622, 1, 0, 0, 0, 620, 618, 1, 0, 0, 0, 621, 614, 1, 0, 0, 0, 621, 622, 1, 0, 0, 0, 622, 623, 1, 0, 0, 0, 623, 624, 3, 74, 32, 0, 624, 634, 1, 0, 0, 0, 625, 627, 3, 104, 47, 0, 626, 628, 3, 66, 28, 0, 627, 626, 1, 0, 0, 0, 628, 629, 1, 0, 0, 0, 629, 627, 1, 0, 0, 0, 629, 630, 1, 0, 0, 0, 630, 631, 1, 0, 0, 0, 631, 632, 3, 74, 32, 0, 632, 634, 1, 0, 0, 0, 633, 592, 1, 0, 0, 0, 633, 603, 1, 0, 0, 0, 633, 610, 1, 0, 0, 0, 633, 625, 1, 0, 0, 0, 634, 91, 1, 0, 0, 0, 635, 636, 5, 98, 0, 0, 636, 637, 5, 121, 0, 0, 637, 93, 1, 0, 0, 0, 638, 639, 5, 97, 0, 0, 639, 640, 5, 110, 0, 0, 640, 641, 5, 100, 0, 0, 641, 95, 1, 0, 0, 0, 642, 643, 5, 97, 0, 0, 643, 644, 5, 115, 0, 0, 644, 645, 5, 99, 0, 0, 645, 97, 1, 0, 0, 0, 646, 647, 5, 61, 0, 0, 647, 99, 1, 0, 0, 0, 648, 649, 5, 44, 0, 0, 649, 101, 1, 0, 0, 0, 650, 651, 5, 100, 0, 0, 651, 652, 5, 101, 0, 0, 652, 653, 5, 115, 0, 0, 653, 654, 5, 99, 0, 0, 654, 103, 1, 0, 0, 0, 655, 656, 5, 46, 0, 0, 656, 105, 1, 0, 0, 0, 657, 658, 5, 102, 0, 0, 658, 659, 5, 97, 0, 0, 659, 660, 5, 108, 0, 0, 660, 661, 5, 115, 0, 0, 661, 662, 5, 101, 0, 0, 662, 107, 1, 0, 0, 0, 663, 664, 5, 102, 0, 0, 664, 665, 5, 105, 0, 0, 665, 666, 5, 114, 0, 0, 666, 667, 5, 115, 0, 0, 667, 668, 5, 116, 0, 0, 668, 109, 1, 0, 0, 0, 669, 670, 5, 108, 0, 0, 670, 671, 5, 97, 0, 0, 671, 672, 5, 115, 0, 0, 672, 673, 5, 116, 0, 0, 673, 111, 1, 0, 0, 0, 674, 675, 5, 40, 0, 0, 675, 113, 1, 0, 0, 0, 676, 677, 5, 105, 0, 0, 677, 678, 5, 110, 0, 0, 678, 115, 1, 0, 0, 0, 679, 680, 5, 105, 0, 0, 680, 681, 5, 115, 0, 0, 681, 117, 1, 0, 0, 0, 682, 683, 5, 108, 0, 0, 683, 684, 5, 105, 0, 0, 684, 685, 5, 107, 0, 0, 685, 686, 5, 101, 0, 0, 686, 119, 1, 0, 0, 0, 687, 688, 5, 110, 0, 0, 688, 689, 5, 111, 0, 0, 689, 690, 5, 116, 0, 0, 690, 121, 1, 0, 0, 0, 691, 692, 5, 110, 0, 0, 692, 693, 5, 117, 0, 0, 693, 694, 5, 108, 0, 0, 694, 695, 5, 108, 0, 0, 695, 123, 1, 0, 0, 0, 696, 697, 5, 110, 0, 0, 697, 698, 5, 117, 0, 0, 698, 699, 5, 108, 0, 0, 699, 700, 5, 108, 0, 0, 700, 701, 5, 115, 0, 0, 701, 125, 1, 0, 0, 0, 702, 703, 5, 111, 0, 0, 703, 704, 5, 114, 0, 0, 704, 127, 1, 0, 0, 0, 705, 706, 5, 63, 0, 0, 706, 129, 1, 0, 0, 0, 707, 708, 5, 114, 0, 0, 708, 709, 5, 108, 0, 0, 709, 710, 5, 105, 0, 0, 710, 711, 5, 107, 0, 0, 711, 712, 5, 101, 0, 0, 712, 131, 1, 0, 0, 0, 713, 714, 5, 41, 0, 0, 714, 133, 1, 0, 0, 0, 715, 716, 5, 116, 0, 0, 716, 717, 5, 114, 0, 0, 717, 718, 5, 117, 0, 0, 718, 719, 5, 101, 0, 0, 719, 135, 1, 0, 0, 0, 720, 721, 5, 61, 0, 0, 721, 722, 5, 61, 0, 0, 722, 137, 1, 0, 0, 0, 723, 724, 5, 33, 0, 0, 724, 725, 5, 61, 0, 0, 725, 139, 1, 0, 0, 0, 726, 727, 5, 60, 0, 0, 727, 141, 1, 0, 0, 0, 728, 729, 5, 60, 0, 0, 729, 730, 5, 61, 0, 0, 730, 143, 1, 0, 0, 0, 731, 732, 5, 62, 0, 0, 732, 145, 1, 0, 0, 0, 733, 734, 5, 62, 0, 0, 734, 735, 5, 61, 0, 0, 735, 147, 1, 0, 0, 0, 736, 737, 5, 43, 0, 0, 737, 149, 1, 0, 0, 0, 738, 739, 5, 45, 0, 0, 739, 151, 1, 0, 0, 0, 740, 741, 5, 42, 0, 0, 741, 153, 1, 0, 0, 0, 742, 743, 5, 47, 0, 0, 743, 155, 1, 0, 0, 0, 744, 745, 5, 37, 0, 0, 745, 157, 1, 0, 0, 0, 746, 747, 5, 91, 0, 0, 747, 748, 1, 0, 0, 0, 748, 749, 6, 74, 0, 0, 749, 750, 
6, 74, 0, 0, 750, 159, 1, 0, 0, 0, 751, 752, 5, 93, 0, 0, 752, 753, 1, 0, 0, 0, 753, 754, 6, 75, 12, 0, 754, 755, 6, 75, 12, 0, 755, 161, 1, 0, 0, 0, 756, 760, 3, 68, 29, 0, 757, 759, 3, 84, 37, 0, 758, 757, 1, 0, 0, 0, 759, 762, 1, 0, 0, 0, 760, 758, 1, 0, 0, 0, 760, 761, 1, 0, 0, 0, 761, 773, 1, 0, 0, 0, 762, 760, 1, 0, 0, 0, 763, 766, 3, 82, 36, 0, 764, 766, 3, 76, 33, 0, 765, 763, 1, 0, 0, 0, 765, 764, 1, 0, 0, 0, 766, 768, 1, 0, 0, 0, 767, 769, 3, 84, 37, 0, 768, 767, 1, 0, 0, 0, 769, 770, 1, 0, 0, 0, 770, 768, 1, 0, 0, 0, 770, 771, 1, 0, 0, 0, 771, 773, 1, 0, 0, 0, 772, 756, 1, 0, 0, 0, 772, 765, 1, 0, 0, 0, 773, 163, 1, 0, 0, 0, 774, 776, 3, 78, 34, 0, 775, 777, 3, 80, 35, 0, 776, 775, 1, 0, 0, 0, 777, 778, 1, 0, 0, 0, 778, 776, 1, 0, 0, 0, 778, 779, 1, 0, 0, 0, 779, 780, 1, 0, 0, 0, 780, 781, 3, 78, 34, 0, 781, 165, 1, 0, 0, 0, 782, 783, 3, 48, 19, 0, 783, 784, 1, 0, 0, 0, 784, 785, 6, 78, 8, 0, 785, 167, 1, 0, 0, 0, 786, 787, 3, 50, 20, 0, 787, 788, 1, 0, 0, 0, 788, 789, 6, 79, 8, 0, 789, 169, 1, 0, 0, 0, 790, 791, 3, 52, 21, 0, 791, 792, 1, 0, 0, 0, 792, 793, 6, 80, 8, 0, 793, 171, 1, 0, 0, 0, 794, 795, 3, 64, 27, 0, 795, 796, 1, 0, 0, 0, 796, 797, 6, 81, 11, 0, 797, 798, 6, 81, 12, 0, 798, 173, 1, 0, 0, 0, 799, 800, 3, 158, 74, 0, 800, 801, 1, 0, 0, 0, 801, 802, 6, 82, 9, 0, 802, 803, 6, 82, 4, 0, 803, 804, 6, 82, 4, 0, 804, 175, 1, 0, 0, 0, 805, 806, 3, 160, 75, 0, 806, 807, 1, 0, 0, 0, 807, 808, 6, 83, 13, 0, 808, 809, 6, 83, 12, 0, 809, 810, 6, 83, 12, 0, 810, 177, 1, 0, 0, 0, 811, 812, 3, 100, 45, 0, 812, 813, 1, 0, 0, 0, 813, 814, 6, 84, 14, 0, 814, 179, 1, 0, 0, 0, 815, 816, 3, 98, 44, 0, 816, 817, 1, 0, 0, 0, 817, 818, 6, 85, 15, 0, 818, 181, 1, 0, 0, 0, 819, 820, 5, 109, 0, 0, 820, 821, 5, 101, 0, 0, 821, 822, 5, 116, 0, 0, 822, 823, 5, 97, 0, 0, 823, 824, 5, 100, 0, 0, 824, 825, 5, 97, 0, 0, 825, 826, 5, 116, 0, 0, 826, 827, 5, 97, 0, 0, 827, 183, 1, 0, 0, 0, 828, 832, 8, 10, 0, 0, 829, 830, 5, 47, 0, 0, 830, 832, 8, 11, 0, 0, 831, 828, 1, 0, 0, 0, 831, 829, 1, 0, 0, 0, 832, 185, 1, 0, 0, 0, 833, 835, 3, 184, 87, 0, 834, 833, 1, 0, 0, 0, 835, 836, 1, 0, 0, 0, 836, 834, 1, 0, 0, 0, 836, 837, 1, 0, 0, 0, 837, 187, 1, 0, 0, 0, 838, 839, 3, 164, 77, 0, 839, 840, 1, 0, 0, 0, 840, 841, 6, 89, 16, 0, 841, 189, 1, 0, 0, 0, 842, 843, 3, 48, 19, 0, 843, 844, 1, 0, 0, 0, 844, 845, 6, 90, 8, 0, 845, 191, 1, 0, 0, 0, 846, 847, 3, 50, 20, 0, 847, 848, 1, 0, 0, 0, 848, 849, 6, 91, 8, 0, 849, 193, 1, 0, 0, 0, 850, 851, 3, 52, 21, 0, 851, 852, 1, 0, 0, 0, 852, 853, 6, 92, 8, 0, 853, 195, 1, 0, 0, 0, 854, 855, 3, 64, 27, 0, 855, 856, 1, 0, 0, 0, 856, 857, 6, 93, 11, 0, 857, 858, 6, 93, 12, 0, 858, 197, 1, 0, 0, 0, 859, 860, 3, 104, 47, 0, 860, 861, 1, 0, 0, 0, 861, 862, 6, 94, 17, 0, 862, 199, 1, 0, 0, 0, 863, 864, 3, 100, 45, 0, 864, 865, 1, 0, 0, 0, 865, 866, 6, 95, 14, 0, 866, 201, 1, 0, 0, 0, 867, 872, 3, 68, 29, 0, 868, 872, 3, 66, 28, 0, 869, 872, 3, 82, 36, 0, 870, 872, 3, 152, 71, 0, 871, 867, 1, 0, 0, 0, 871, 868, 1, 0, 0, 0, 871, 869, 1, 0, 0, 0, 871, 870, 1, 0, 0, 0, 872, 203, 1, 0, 0, 0, 873, 876, 3, 68, 29, 0, 874, 876, 3, 152, 71, 0, 875, 873, 1, 0, 0, 0, 875, 874, 1, 0, 0, 0, 876, 880, 1, 0, 0, 0, 877, 879, 3, 202, 96, 0, 878, 877, 1, 0, 0, 0, 879, 882, 1, 0, 0, 0, 880, 878, 1, 0, 0, 0, 880, 881, 1, 0, 0, 0, 881, 893, 1, 0, 0, 0, 882, 880, 1, 0, 0, 0, 883, 886, 3, 82, 36, 0, 884, 886, 3, 76, 33, 0, 885, 883, 1, 0, 0, 0, 885, 884, 1, 0, 0, 0, 886, 888, 1, 0, 0, 0, 887, 889, 3, 202, 96, 0, 888, 887, 1, 0, 0, 0, 889, 890, 1, 0, 0, 0, 890, 888, 1, 0, 0, 0, 890, 891, 1, 0, 0, 
0, 891, 893, 1, 0, 0, 0, 892, 875, 1, 0, 0, 0, 892, 885, 1, 0, 0, 0, 893, 205, 1, 0, 0, 0, 894, 895, 3, 164, 77, 0, 895, 896, 1, 0, 0, 0, 896, 897, 6, 98, 16, 0, 897, 207, 1, 0, 0, 0, 898, 899, 3, 48, 19, 0, 899, 900, 1, 0, 0, 0, 900, 901, 6, 99, 8, 0, 901, 209, 1, 0, 0, 0, 902, 903, 3, 50, 20, 0, 903, 904, 1, 0, 0, 0, 904, 905, 6, 100, 8, 0, 905, 211, 1, 0, 0, 0, 906, 907, 3, 52, 21, 0, 907, 908, 1, 0, 0, 0, 908, 909, 6, 101, 8, 0, 909, 213, 1, 0, 0, 0, 910, 911, 3, 64, 27, 0, 911, 912, 1, 0, 0, 0, 912, 913, 6, 102, 11, 0, 913, 914, 6, 102, 12, 0, 914, 215, 1, 0, 0, 0, 915, 916, 3, 98, 44, 0, 916, 917, 1, 0, 0, 0, 917, 918, 6, 103, 15, 0, 918, 217, 1, 0, 0, 0, 919, 920, 3, 100, 45, 0, 920, 921, 1, 0, 0, 0, 921, 922, 6, 104, 14, 0, 922, 219, 1, 0, 0, 0, 923, 924, 3, 104, 47, 0, 924, 925, 1, 0, 0, 0, 925, 926, 6, 105, 17, 0, 926, 221, 1, 0, 0, 0, 927, 928, 5, 97, 0, 0, 928, 929, 5, 115, 0, 0, 929, 223, 1, 0, 0, 0, 930, 931, 3, 164, 77, 0, 931, 932, 1, 0, 0, 0, 932, 933, 6, 107, 16, 0, 933, 225, 1, 0, 0, 0, 934, 935, 3, 204, 97, 0, 935, 936, 1, 0, 0, 0, 936, 937, 6, 108, 18, 0, 937, 227, 1, 0, 0, 0, 938, 939, 3, 48, 19, 0, 939, 940, 1, 0, 0, 0, 940, 941, 6, 109, 8, 0, 941, 229, 1, 0, 0, 0, 942, 943, 3, 50, 20, 0, 943, 944, 1, 0, 0, 0, 944, 945, 6, 110, 8, 0, 945, 231, 1, 0, 0, 0, 946, 947, 3, 52, 21, 0, 947, 948, 1, 0, 0, 0, 948, 949, 6, 111, 8, 0, 949, 233, 1, 0, 0, 0, 950, 951, 3, 64, 27, 0, 951, 952, 1, 0, 0, 0, 952, 953, 6, 112, 11, 0, 953, 954, 6, 112, 12, 0, 954, 235, 1, 0, 0, 0, 955, 956, 5, 111, 0, 0, 956, 957, 5, 110, 0, 0, 957, 958, 1, 0, 0, 0, 958, 959, 6, 113, 19, 0, 959, 237, 1, 0, 0, 0, 960, 961, 5, 119, 0, 0, 961, 962, 5, 105, 0, 0, 962, 963, 5, 116, 0, 0, 963, 964, 5, 104, 0, 0, 964, 965, 1, 0, 0, 0, 965, 966, 6, 114, 19, 0, 966, 239, 1, 0, 0, 0, 967, 968, 3, 186, 88, 0, 968, 969, 1, 0, 0, 0, 969, 970, 6, 115, 20, 0, 970, 241, 1, 0, 0, 0, 971, 972, 3, 164, 77, 0, 972, 973, 1, 0, 0, 0, 973, 974, 6, 116, 16, 0, 974, 243, 1, 0, 0, 0, 975, 976, 3, 48, 19, 0, 976, 977, 1, 0, 0, 0, 977, 978, 6, 117, 8, 0, 978, 245, 1, 0, 0, 0, 979, 980, 3, 50, 20, 0, 980, 981, 1, 0, 0, 0, 981, 982, 6, 118, 8, 0, 982, 247, 1, 0, 0, 0, 983, 984, 3, 52, 21, 0, 984, 985, 1, 0, 0, 0, 985, 986, 6, 119, 8, 0, 986, 249, 1, 0, 0, 0, 987, 988, 3, 64, 27, 0, 988, 989, 1, 0, 0, 0, 989, 990, 6, 120, 11, 0, 990, 991, 6, 120, 12, 0, 991, 992, 6, 120, 12, 0, 992, 251, 1, 0, 0, 0, 993, 994, 3, 98, 44, 0, 994, 995, 1, 0, 0, 0, 995, 996, 6, 121, 15, 0, 996, 253, 1, 0, 0, 0, 997, 998, 3, 100, 45, 0, 998, 999, 1, 0, 0, 0, 999, 1000, 6, 122, 14, 0, 1000, 255, 1, 0, 0, 0, 1001, 1002, 3, 104, 47, 0, 1002, 1003, 1, 0, 0, 0, 1003, 1004, 6, 123, 17, 0, 1004, 257, 1, 0, 0, 0, 1005, 1006, 3, 238, 114, 0, 1006, 1007, 1, 0, 0, 0, 1007, 1008, 6, 124, 21, 0, 1008, 259, 1, 0, 0, 0, 1009, 1010, 3, 204, 97, 0, 1010, 1011, 1, 0, 0, 0, 1011, 1012, 6, 125, 18, 0, 1012, 261, 1, 0, 0, 0, 1013, 1014, 3, 164, 77, 0, 1014, 1015, 1, 0, 0, 0, 1015, 1016, 6, 126, 16, 0, 1016, 263, 1, 0, 0, 0, 1017, 1018, 3, 48, 19, 0, 1018, 1019, 1, 0, 0, 0, 1019, 1020, 6, 127, 8, 0, 1020, 265, 1, 0, 0, 0, 1021, 1022, 3, 50, 20, 0, 1022, 1023, 1, 0, 0, 0, 1023, 1024, 6, 128, 8, 0, 1024, 267, 1, 0, 0, 0, 1025, 1026, 3, 52, 21, 0, 1026, 1027, 1, 0, 0, 0, 1027, 1028, 6, 129, 8, 0, 1028, 269, 1, 0, 0, 0, 1029, 1030, 3, 64, 27, 0, 1030, 1031, 1, 0, 0, 0, 1031, 1032, 6, 130, 11, 0, 1032, 1033, 6, 130, 12, 0, 1033, 271, 1, 0, 0, 0, 1034, 1035, 3, 104, 47, 0, 1035, 1036, 1, 0, 0, 0, 1036, 1037, 6, 131, 17, 0, 1037, 273, 1, 0, 0, 0, 1038, 1039, 3, 164, 77, 0, 1039, 
1040, 1, 0, 0, 0, 1040, 1041, 6, 132, 16, 0, 1041, 275, 1, 0, 0, 0, 1042, 1043, 3, 162, 76, 0, 1043, 1044, 1, 0, 0, 0, 1044, 1045, 6, 133, 22, 0, 1045, 277, 1, 0, 0, 0, 1046, 1047, 3, 48, 19, 0, 1047, 1048, 1, 0, 0, 0, 1048, 1049, 6, 134, 8, 0, 1049, 279, 1, 0, 0, 0, 1050, 1051, 3, 50, 20, 0, 1051, 1052, 1, 0, 0, 0, 1052, 1053, 6, 135, 8, 0, 1053, 281, 1, 0, 0, 0, 1054, 1055, 3, 52, 21, 0, 1055, 1056, 1, 0, 0, 0, 1056, 1057, 6, 136, 8, 0, 1057, 283, 1, 0, 0, 0, 1058, 1059, 3, 64, 27, 0, 1059, 1060, 1, 0, 0, 0, 1060, 1061, 6, 137, 11, 0, 1061, 1062, 6, 137, 12, 0, 1062, 285, 1, 0, 0, 0, 1063, 1064, 5, 105, 0, 0, 1064, 1065, 5, 110, 0, 0, 1065, 1066, 5, 102, 0, 0, 1066, 1067, 5, 111, 0, 0, 1067, 287, 1, 0, 0, 0, 1068, 1069, 5, 102, 0, 0, 1069, 1070, 5, 117, 0, 0, 1070, 1071, 5, 110, 0, 0, 1071, 1072, 5, 99, 0, 0, 1072, 1073, 5, 116, 0, 0, 1073, 1074, 5, 105, 0, 0, 1074, 1075, 5, 111, 0, 0, 1075, 1076, 5, 110, 0, 0, 1076, 1077, 5, 115, 0, 0, 1077, 289, 1, 0, 0, 0, 1078, 1079, 3, 48, 19, 0, 1079, 1080, 1, 0, 0, 0, 1080, 1081, 6, 140, 8, 0, 1081, 291, 1, 0, 0, 0, 1082, 1083, 3, 50, 20, 0, 1083, 1084, 1, 0, 0, 0, 1084, 1085, 6, 141, 8, 0, 1085, 293, 1, 0, 0, 0, 1086, 1087, 3, 52, 21, 0, 1087, 1088, 1, 0, 0, 0, 1088, 1089, 6, 142, 8, 0, 1089, 295, 1, 0, 0, 0, 49, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 452, 462, 466, 469, 478, 480, 491, 532, 537, 546, 553, 558, 560, 571, 579, 582, 584, 589, 594, 600, 607, 612, 618, 621, 629, 633, 760, 765, 770, 772, 778, 831, 836, 871, 875, 880, 885, 890, 892, 23, 5, 2, 0, 5, 4, 0, 5, 6, 0, 5, 1, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 0, 1, 0, 7, 63, 0, 5, 0, 0, 7, 26, 0, 4, 0, 0, 7, 64, 0, 7, 34, 0, 7, 33, 0, 7, 66, 0, 7, 36, 0, 7, 75, 0, 5, 7, 0, 7, 71, 0, 7, 84, 0, 7, 65, 0] \ No newline at end of file +[4, 0, 104, 1156, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 
120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 4, 18, 472, 8, 18, 11, 18, 12, 18, 473, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 482, 8, 19, 10, 19, 12, 19, 485, 9, 19, 1, 19, 3, 19, 488, 8, 19, 1, 19, 3, 19, 491, 8, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 500, 8, 20, 10, 20, 12, 20, 503, 9, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 4, 21, 511, 8, 21, 11, 21, 12, 21, 512, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 1, 32, 3, 32, 554, 8, 32, 1, 32, 4, 32, 557, 8, 32, 11, 32, 12, 32, 558, 1, 33, 1, 33, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 3, 35, 568, 8, 35, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 3, 37, 575, 8, 37, 1, 38, 1, 38, 1, 38, 5, 38, 580, 8, 38, 10, 38, 12, 38, 583, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 591, 8, 38, 10, 38, 12, 38, 594, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 3, 38, 601, 8, 38, 1, 38, 3, 38, 604, 8, 38, 3, 38, 606, 8, 38, 1, 39, 4, 39, 609, 8, 39, 11, 39, 12, 39, 610, 1, 40, 4, 40, 614, 8, 40, 11, 40, 12, 40, 615, 1, 40, 1, 40, 5, 40, 620, 8, 40, 10, 40, 12, 40, 623, 9, 40, 1, 40, 1, 40, 4, 40, 627, 8, 40, 11, 40, 12, 40, 628, 1, 40, 4, 40, 632, 8, 40, 11, 40, 12, 40, 633, 1, 40, 1, 40, 5, 40, 638, 8, 40, 10, 40, 12, 40, 641, 9, 40, 3, 40, 643, 8, 40, 1, 40, 1, 40, 1, 40, 1, 40, 4, 40, 649, 8, 40, 11, 40, 12, 40, 650, 1, 40, 1, 40, 3, 40, 655, 8, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 
1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 76, 1, 76, 5, 76, 780, 8, 76, 10, 76, 12, 76, 783, 9, 76, 1, 76, 1, 76, 3, 76, 787, 8, 76, 1, 76, 4, 76, 790, 8, 76, 11, 76, 12, 76, 791, 3, 76, 794, 8, 76, 1, 77, 1, 77, 4, 77, 798, 8, 77, 11, 77, 12, 77, 799, 1, 77, 1, 77, 1, 78, 1, 78, 1, 78, 1, 78, 1, 79, 1, 79, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 86, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 3, 87, 849, 8, 87, 1, 88, 4, 88, 852, 8, 88, 11, 88, 12, 88, 853, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 3, 96, 889, 8, 96, 1, 97, 1, 97, 3, 97, 893, 8, 97, 1, 97, 5, 97, 896, 8, 97, 10, 97, 12, 97, 899, 9, 97, 1, 97, 1, 97, 3, 97, 903, 8, 97, 1, 97, 4, 97, 906, 8, 97, 11, 97, 12, 97, 907, 3, 97, 910, 8, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 118, 1, 118, 3, 118, 998, 8, 118, 1, 118, 5, 118, 1001, 8, 118, 10, 118, 12, 118, 1004, 9, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 143, 1, 143, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 4, 149, 1141, 8, 149, 11, 149, 12, 149, 1142, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 2, 501, 592, 0, 153, 11, 1, 13, 2, 15, 3, 17, 4, 19, 5, 21, 6, 23, 7, 25, 8, 27, 9, 29, 10, 31, 11, 33, 12, 35, 13, 37, 14, 39, 15, 41, 16, 43, 17, 45, 18, 47, 19, 49, 20, 51, 21, 53, 22, 55, 0, 57, 0, 
59, 23, 61, 24, 63, 25, 65, 26, 67, 0, 69, 0, 71, 0, 73, 0, 75, 0, 77, 0, 79, 0, 81, 0, 83, 0, 85, 0, 87, 27, 89, 28, 91, 29, 93, 30, 95, 31, 97, 32, 99, 33, 101, 34, 103, 35, 105, 36, 107, 37, 109, 38, 111, 39, 113, 40, 115, 41, 117, 42, 119, 43, 121, 44, 123, 45, 125, 46, 127, 47, 129, 48, 131, 49, 133, 50, 135, 51, 137, 52, 139, 53, 141, 54, 143, 55, 145, 56, 147, 57, 149, 58, 151, 59, 153, 60, 155, 61, 157, 62, 159, 63, 161, 64, 163, 65, 165, 66, 167, 67, 169, 68, 171, 69, 173, 0, 175, 0, 177, 0, 179, 0, 181, 0, 183, 70, 185, 0, 187, 71, 189, 0, 191, 72, 193, 73, 195, 74, 197, 0, 199, 0, 201, 0, 203, 0, 205, 75, 207, 0, 209, 0, 211, 76, 213, 77, 215, 78, 217, 0, 219, 0, 221, 0, 223, 0, 225, 79, 227, 0, 229, 0, 231, 80, 233, 81, 235, 82, 237, 0, 239, 0, 241, 83, 243, 84, 245, 0, 247, 85, 249, 0, 251, 0, 253, 86, 255, 87, 257, 88, 259, 0, 261, 0, 263, 0, 265, 0, 267, 0, 269, 0, 271, 0, 273, 89, 275, 90, 277, 91, 279, 0, 281, 0, 283, 0, 285, 0, 287, 92, 289, 93, 291, 94, 293, 0, 295, 95, 297, 96, 299, 97, 301, 98, 303, 99, 305, 0, 307, 100, 309, 101, 311, 102, 313, 103, 315, 104, 11, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 13, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 5, 0, 34, 34, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 10, 0, 9, 10, 13, 13, 32, 32, 44, 44, 47, 47, 61, 61, 91, 91, 93, 93, 96, 96, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1183, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 1, 55, 1, 0, 0, 0, 1, 57, 1, 0, 0, 0, 1, 59, 1, 0, 0, 0, 1, 61, 1, 0, 0, 0, 1, 63, 1, 0, 0, 0, 2, 65, 1, 0, 0, 0, 2, 87, 1, 0, 0, 0, 2, 89, 1, 0, 0, 0, 2, 91, 1, 0, 0, 0, 2, 93, 1, 0, 0, 0, 2, 95, 1, 0, 0, 0, 2, 97, 1, 0, 0, 0, 2, 99, 1, 0, 0, 0, 2, 101, 1, 0, 0, 0, 2, 103, 1, 0, 0, 0, 2, 105, 1, 0, 0, 0, 2, 107, 1, 0, 0, 0, 2, 109, 1, 0, 0, 0, 2, 111, 1, 0, 0, 0, 2, 113, 1, 0, 0, 0, 2, 115, 1, 0, 0, 0, 2, 117, 1, 0, 0, 0, 2, 119, 1, 0, 0, 0, 2, 121, 1, 0, 0, 0, 2, 123, 1, 0, 0, 0, 2, 125, 1, 0, 0, 0, 2, 127, 1, 0, 0, 0, 2, 129, 1, 0, 0, 0, 2, 131, 1, 0, 0, 0, 2, 133, 1, 0, 0, 0, 2, 135, 1, 0, 0, 0, 2, 137, 1, 0, 0, 0, 2, 139, 1, 0, 0, 0, 2, 141, 1, 0, 0, 0, 2, 143, 1, 0, 0, 0, 2, 145, 1, 0, 0, 0, 2, 147, 1, 0, 0, 0, 2, 149, 1, 0, 0, 0, 2, 151, 1, 0, 0, 0, 2, 153, 1, 0, 0, 0, 2, 155, 1, 0, 0, 0, 2, 157, 1, 0, 0, 0, 2, 159, 1, 0, 0, 0, 2, 161, 1, 0, 0, 0, 2, 163, 1, 0, 0, 0, 2, 165, 1, 0, 0, 0, 2, 167, 1, 0, 0, 0, 2, 169, 1, 0, 0, 0, 2, 171, 1, 0, 0, 0, 3, 173, 1, 0, 0, 0, 3, 175, 1, 0, 0, 0, 3, 177, 1, 0, 0, 0, 3, 179, 1, 0, 0, 0, 3, 181, 1, 0, 0, 0, 3, 183, 1, 0, 0, 0, 3, 187, 1, 0, 0, 0, 3, 189, 1, 0, 0, 0, 3, 191, 1, 0, 0, 0, 3, 193, 1, 0, 0, 0, 3, 195, 1, 0, 0, 0, 4, 197, 1, 0, 0, 0, 4, 199, 1, 0, 0, 0, 4, 201, 1, 0, 0, 0, 4, 205, 1, 0, 0, 0, 4, 207, 1, 0, 0, 0, 4, 209, 1, 0, 0, 0, 4, 211, 1, 0, 0, 0, 4, 213, 1, 0, 0, 0, 4, 215, 1, 0, 0, 0, 5, 217, 1, 0, 0, 0, 5, 219, 1, 0, 0, 0, 5, 221, 1, 0, 0, 0, 5, 223, 1, 0, 0, 0, 5, 225, 1, 0, 0, 0, 5, 227, 1, 0, 0, 0, 5, 
229, 1, 0, 0, 0, 5, 231, 1, 0, 0, 0, 5, 233, 1, 0, 0, 0, 5, 235, 1, 0, 0, 0, 6, 237, 1, 0, 0, 0, 6, 239, 1, 0, 0, 0, 6, 241, 1, 0, 0, 0, 6, 243, 1, 0, 0, 0, 6, 247, 1, 0, 0, 0, 6, 249, 1, 0, 0, 0, 6, 251, 1, 0, 0, 0, 6, 253, 1, 0, 0, 0, 6, 255, 1, 0, 0, 0, 6, 257, 1, 0, 0, 0, 7, 259, 1, 0, 0, 0, 7, 261, 1, 0, 0, 0, 7, 263, 1, 0, 0, 0, 7, 265, 1, 0, 0, 0, 7, 267, 1, 0, 0, 0, 7, 269, 1, 0, 0, 0, 7, 271, 1, 0, 0, 0, 7, 273, 1, 0, 0, 0, 7, 275, 1, 0, 0, 0, 7, 277, 1, 0, 0, 0, 8, 279, 1, 0, 0, 0, 8, 281, 1, 0, 0, 0, 8, 283, 1, 0, 0, 0, 8, 285, 1, 0, 0, 0, 8, 287, 1, 0, 0, 0, 8, 289, 1, 0, 0, 0, 8, 291, 1, 0, 0, 0, 9, 293, 1, 0, 0, 0, 9, 295, 1, 0, 0, 0, 9, 297, 1, 0, 0, 0, 9, 299, 1, 0, 0, 0, 9, 301, 1, 0, 0, 0, 9, 303, 1, 0, 0, 0, 10, 305, 1, 0, 0, 0, 10, 307, 1, 0, 0, 0, 10, 309, 1, 0, 0, 0, 10, 311, 1, 0, 0, 0, 10, 313, 1, 0, 0, 0, 10, 315, 1, 0, 0, 0, 11, 317, 1, 0, 0, 0, 13, 327, 1, 0, 0, 0, 15, 334, 1, 0, 0, 0, 17, 343, 1, 0, 0, 0, 19, 350, 1, 0, 0, 0, 21, 360, 1, 0, 0, 0, 23, 367, 1, 0, 0, 0, 25, 374, 1, 0, 0, 0, 27, 388, 1, 0, 0, 0, 29, 395, 1, 0, 0, 0, 31, 403, 1, 0, 0, 0, 33, 415, 1, 0, 0, 0, 35, 425, 1, 0, 0, 0, 37, 434, 1, 0, 0, 0, 39, 440, 1, 0, 0, 0, 41, 447, 1, 0, 0, 0, 43, 454, 1, 0, 0, 0, 45, 462, 1, 0, 0, 0, 47, 471, 1, 0, 0, 0, 49, 477, 1, 0, 0, 0, 51, 494, 1, 0, 0, 0, 53, 510, 1, 0, 0, 0, 55, 516, 1, 0, 0, 0, 57, 521, 1, 0, 0, 0, 59, 526, 1, 0, 0, 0, 61, 530, 1, 0, 0, 0, 63, 534, 1, 0, 0, 0, 65, 538, 1, 0, 0, 0, 67, 542, 1, 0, 0, 0, 69, 544, 1, 0, 0, 0, 71, 546, 1, 0, 0, 0, 73, 549, 1, 0, 0, 0, 75, 551, 1, 0, 0, 0, 77, 560, 1, 0, 0, 0, 79, 562, 1, 0, 0, 0, 81, 567, 1, 0, 0, 0, 83, 569, 1, 0, 0, 0, 85, 574, 1, 0, 0, 0, 87, 605, 1, 0, 0, 0, 89, 608, 1, 0, 0, 0, 91, 654, 1, 0, 0, 0, 93, 656, 1, 0, 0, 0, 95, 659, 1, 0, 0, 0, 97, 663, 1, 0, 0, 0, 99, 667, 1, 0, 0, 0, 101, 669, 1, 0, 0, 0, 103, 671, 1, 0, 0, 0, 105, 676, 1, 0, 0, 0, 107, 678, 1, 0, 0, 0, 109, 684, 1, 0, 0, 0, 111, 690, 1, 0, 0, 0, 113, 695, 1, 0, 0, 0, 115, 697, 1, 0, 0, 0, 117, 700, 1, 0, 0, 0, 119, 703, 1, 0, 0, 0, 121, 708, 1, 0, 0, 0, 123, 712, 1, 0, 0, 0, 125, 717, 1, 0, 0, 0, 127, 723, 1, 0, 0, 0, 129, 726, 1, 0, 0, 0, 131, 728, 1, 0, 0, 0, 133, 734, 1, 0, 0, 0, 135, 736, 1, 0, 0, 0, 137, 741, 1, 0, 0, 0, 139, 744, 1, 0, 0, 0, 141, 747, 1, 0, 0, 0, 143, 749, 1, 0, 0, 0, 145, 752, 1, 0, 0, 0, 147, 754, 1, 0, 0, 0, 149, 757, 1, 0, 0, 0, 151, 759, 1, 0, 0, 0, 153, 761, 1, 0, 0, 0, 155, 763, 1, 0, 0, 0, 157, 765, 1, 0, 0, 0, 159, 767, 1, 0, 0, 0, 161, 772, 1, 0, 0, 0, 163, 793, 1, 0, 0, 0, 165, 795, 1, 0, 0, 0, 167, 803, 1, 0, 0, 0, 169, 807, 1, 0, 0, 0, 171, 811, 1, 0, 0, 0, 173, 815, 1, 0, 0, 0, 175, 820, 1, 0, 0, 0, 177, 824, 1, 0, 0, 0, 179, 828, 1, 0, 0, 0, 181, 832, 1, 0, 0, 0, 183, 836, 1, 0, 0, 0, 185, 848, 1, 0, 0, 0, 187, 851, 1, 0, 0, 0, 189, 855, 1, 0, 0, 0, 191, 859, 1, 0, 0, 0, 193, 863, 1, 0, 0, 0, 195, 867, 1, 0, 0, 0, 197, 871, 1, 0, 0, 0, 199, 876, 1, 0, 0, 0, 201, 880, 1, 0, 0, 0, 203, 888, 1, 0, 0, 0, 205, 909, 1, 0, 0, 0, 207, 911, 1, 0, 0, 0, 209, 915, 1, 0, 0, 0, 211, 919, 1, 0, 0, 0, 213, 923, 1, 0, 0, 0, 215, 927, 1, 0, 0, 0, 217, 931, 1, 0, 0, 0, 219, 936, 1, 0, 0, 0, 221, 940, 1, 0, 0, 0, 223, 944, 1, 0, 0, 0, 225, 948, 1, 0, 0, 0, 227, 951, 1, 0, 0, 0, 229, 955, 1, 0, 0, 0, 231, 959, 1, 0, 0, 0, 233, 963, 1, 0, 0, 0, 235, 967, 1, 0, 0, 0, 237, 971, 1, 0, 0, 0, 239, 976, 1, 0, 0, 0, 241, 981, 1, 0, 0, 0, 243, 986, 1, 0, 0, 0, 245, 993, 1, 0, 0, 0, 247, 997, 1, 0, 0, 0, 249, 1005, 1, 0, 0, 0, 251, 1009, 1, 0, 0, 0, 253, 1013, 1, 0, 0, 0, 255, 1017, 1, 0, 0, 0, 257, 1021, 1, 0, 0, 0, 
259, 1025, 1, 0, 0, 0, 261, 1031, 1, 0, 0, 0, 263, 1035, 1, 0, 0, 0, 265, 1039, 1, 0, 0, 0, 267, 1043, 1, 0, 0, 0, 269, 1047, 1, 0, 0, 0, 271, 1051, 1, 0, 0, 0, 273, 1055, 1, 0, 0, 0, 275, 1059, 1, 0, 0, 0, 277, 1063, 1, 0, 0, 0, 279, 1067, 1, 0, 0, 0, 281, 1072, 1, 0, 0, 0, 283, 1076, 1, 0, 0, 0, 285, 1080, 1, 0, 0, 0, 287, 1084, 1, 0, 0, 0, 289, 1088, 1, 0, 0, 0, 291, 1092, 1, 0, 0, 0, 293, 1096, 1, 0, 0, 0, 295, 1101, 1, 0, 0, 0, 297, 1106, 1, 0, 0, 0, 299, 1116, 1, 0, 0, 0, 301, 1120, 1, 0, 0, 0, 303, 1124, 1, 0, 0, 0, 305, 1128, 1, 0, 0, 0, 307, 1133, 1, 0, 0, 0, 309, 1140, 1, 0, 0, 0, 311, 1144, 1, 0, 0, 0, 313, 1148, 1, 0, 0, 0, 315, 1152, 1, 0, 0, 0, 317, 318, 5, 100, 0, 0, 318, 319, 5, 105, 0, 0, 319, 320, 5, 115, 0, 0, 320, 321, 5, 115, 0, 0, 321, 322, 5, 101, 0, 0, 322, 323, 5, 99, 0, 0, 323, 324, 5, 116, 0, 0, 324, 325, 1, 0, 0, 0, 325, 326, 6, 0, 0, 0, 326, 12, 1, 0, 0, 0, 327, 328, 5, 100, 0, 0, 328, 329, 5, 114, 0, 0, 329, 330, 5, 111, 0, 0, 330, 331, 5, 112, 0, 0, 331, 332, 1, 0, 0, 0, 332, 333, 6, 1, 1, 0, 333, 14, 1, 0, 0, 0, 334, 335, 5, 101, 0, 0, 335, 336, 5, 110, 0, 0, 336, 337, 5, 114, 0, 0, 337, 338, 5, 105, 0, 0, 338, 339, 5, 99, 0, 0, 339, 340, 5, 104, 0, 0, 340, 341, 1, 0, 0, 0, 341, 342, 6, 2, 2, 0, 342, 16, 1, 0, 0, 0, 343, 344, 5, 101, 0, 0, 344, 345, 5, 118, 0, 0, 345, 346, 5, 97, 0, 0, 346, 347, 5, 108, 0, 0, 347, 348, 1, 0, 0, 0, 348, 349, 6, 3, 0, 0, 349, 18, 1, 0, 0, 0, 350, 351, 5, 101, 0, 0, 351, 352, 5, 120, 0, 0, 352, 353, 5, 112, 0, 0, 353, 354, 5, 108, 0, 0, 354, 355, 5, 97, 0, 0, 355, 356, 5, 105, 0, 0, 356, 357, 5, 110, 0, 0, 357, 358, 1, 0, 0, 0, 358, 359, 6, 4, 3, 0, 359, 20, 1, 0, 0, 0, 360, 361, 5, 102, 0, 0, 361, 362, 5, 114, 0, 0, 362, 363, 5, 111, 0, 0, 363, 364, 5, 109, 0, 0, 364, 365, 1, 0, 0, 0, 365, 366, 6, 5, 4, 0, 366, 22, 1, 0, 0, 0, 367, 368, 5, 103, 0, 0, 368, 369, 5, 114, 0, 0, 369, 370, 5, 111, 0, 0, 370, 371, 5, 107, 0, 0, 371, 372, 1, 0, 0, 0, 372, 373, 6, 6, 0, 0, 373, 24, 1, 0, 0, 0, 374, 375, 5, 105, 0, 0, 375, 376, 5, 110, 0, 0, 376, 377, 5, 108, 0, 0, 377, 378, 5, 105, 0, 0, 378, 379, 5, 110, 0, 0, 379, 380, 5, 101, 0, 0, 380, 381, 5, 115, 0, 0, 381, 382, 5, 116, 0, 0, 382, 383, 5, 97, 0, 0, 383, 384, 5, 116, 0, 0, 384, 385, 5, 115, 0, 0, 385, 386, 1, 0, 0, 0, 386, 387, 6, 7, 0, 0, 387, 26, 1, 0, 0, 0, 388, 389, 5, 107, 0, 0, 389, 390, 5, 101, 0, 0, 390, 391, 5, 101, 0, 0, 391, 392, 5, 112, 0, 0, 392, 393, 1, 0, 0, 0, 393, 394, 6, 8, 1, 0, 394, 28, 1, 0, 0, 0, 395, 396, 5, 108, 0, 0, 396, 397, 5, 105, 0, 0, 397, 398, 5, 109, 0, 0, 398, 399, 5, 105, 0, 0, 399, 400, 5, 116, 0, 0, 400, 401, 1, 0, 0, 0, 401, 402, 6, 9, 0, 0, 402, 30, 1, 0, 0, 0, 403, 404, 5, 109, 0, 0, 404, 405, 5, 118, 0, 0, 405, 406, 5, 95, 0, 0, 406, 407, 5, 101, 0, 0, 407, 408, 5, 120, 0, 0, 408, 409, 5, 112, 0, 0, 409, 410, 5, 97, 0, 0, 410, 411, 5, 110, 0, 0, 411, 412, 5, 100, 0, 0, 412, 413, 1, 0, 0, 0, 413, 414, 6, 10, 5, 0, 414, 32, 1, 0, 0, 0, 415, 416, 5, 112, 0, 0, 416, 417, 5, 114, 0, 0, 417, 418, 5, 111, 0, 0, 418, 419, 5, 106, 0, 0, 419, 420, 5, 101, 0, 0, 420, 421, 5, 99, 0, 0, 421, 422, 5, 116, 0, 0, 422, 423, 1, 0, 0, 0, 423, 424, 6, 11, 1, 0, 424, 34, 1, 0, 0, 0, 425, 426, 5, 114, 0, 0, 426, 427, 5, 101, 0, 0, 427, 428, 5, 110, 0, 0, 428, 429, 5, 97, 0, 0, 429, 430, 5, 109, 0, 0, 430, 431, 5, 101, 0, 0, 431, 432, 1, 0, 0, 0, 432, 433, 6, 12, 6, 0, 433, 36, 1, 0, 0, 0, 434, 435, 5, 114, 0, 0, 435, 436, 5, 111, 0, 0, 436, 437, 5, 119, 0, 0, 437, 438, 1, 0, 0, 0, 438, 439, 6, 13, 0, 0, 439, 38, 1, 0, 0, 0, 440, 441, 5, 115, 0, 0, 441, 442, 5, 
104, 0, 0, 442, 443, 5, 111, 0, 0, 443, 444, 5, 119, 0, 0, 444, 445, 1, 0, 0, 0, 445, 446, 6, 14, 7, 0, 446, 40, 1, 0, 0, 0, 447, 448, 5, 115, 0, 0, 448, 449, 5, 111, 0, 0, 449, 450, 5, 114, 0, 0, 450, 451, 5, 116, 0, 0, 451, 452, 1, 0, 0, 0, 452, 453, 6, 15, 0, 0, 453, 42, 1, 0, 0, 0, 454, 455, 5, 115, 0, 0, 455, 456, 5, 116, 0, 0, 456, 457, 5, 97, 0, 0, 457, 458, 5, 116, 0, 0, 458, 459, 5, 115, 0, 0, 459, 460, 1, 0, 0, 0, 460, 461, 6, 16, 0, 0, 461, 44, 1, 0, 0, 0, 462, 463, 5, 119, 0, 0, 463, 464, 5, 104, 0, 0, 464, 465, 5, 101, 0, 0, 465, 466, 5, 114, 0, 0, 466, 467, 5, 101, 0, 0, 467, 468, 1, 0, 0, 0, 468, 469, 6, 17, 0, 0, 469, 46, 1, 0, 0, 0, 470, 472, 8, 0, 0, 0, 471, 470, 1, 0, 0, 0, 472, 473, 1, 0, 0, 0, 473, 471, 1, 0, 0, 0, 473, 474, 1, 0, 0, 0, 474, 475, 1, 0, 0, 0, 475, 476, 6, 18, 0, 0, 476, 48, 1, 0, 0, 0, 477, 478, 5, 47, 0, 0, 478, 479, 5, 47, 0, 0, 479, 483, 1, 0, 0, 0, 480, 482, 8, 1, 0, 0, 481, 480, 1, 0, 0, 0, 482, 485, 1, 0, 0, 0, 483, 481, 1, 0, 0, 0, 483, 484, 1, 0, 0, 0, 484, 487, 1, 0, 0, 0, 485, 483, 1, 0, 0, 0, 486, 488, 5, 13, 0, 0, 487, 486, 1, 0, 0, 0, 487, 488, 1, 0, 0, 0, 488, 490, 1, 0, 0, 0, 489, 491, 5, 10, 0, 0, 490, 489, 1, 0, 0, 0, 490, 491, 1, 0, 0, 0, 491, 492, 1, 0, 0, 0, 492, 493, 6, 19, 8, 0, 493, 50, 1, 0, 0, 0, 494, 495, 5, 47, 0, 0, 495, 496, 5, 42, 0, 0, 496, 501, 1, 0, 0, 0, 497, 500, 3, 51, 20, 0, 498, 500, 9, 0, 0, 0, 499, 497, 1, 0, 0, 0, 499, 498, 1, 0, 0, 0, 500, 503, 1, 0, 0, 0, 501, 502, 1, 0, 0, 0, 501, 499, 1, 0, 0, 0, 502, 504, 1, 0, 0, 0, 503, 501, 1, 0, 0, 0, 504, 505, 5, 42, 0, 0, 505, 506, 5, 47, 0, 0, 506, 507, 1, 0, 0, 0, 507, 508, 6, 20, 8, 0, 508, 52, 1, 0, 0, 0, 509, 511, 7, 2, 0, 0, 510, 509, 1, 0, 0, 0, 511, 512, 1, 0, 0, 0, 512, 510, 1, 0, 0, 0, 512, 513, 1, 0, 0, 0, 513, 514, 1, 0, 0, 0, 514, 515, 6, 21, 8, 0, 515, 54, 1, 0, 0, 0, 516, 517, 3, 159, 74, 0, 517, 518, 1, 0, 0, 0, 518, 519, 6, 22, 9, 0, 519, 520, 6, 22, 10, 0, 520, 56, 1, 0, 0, 0, 521, 522, 3, 65, 27, 0, 522, 523, 1, 0, 0, 0, 523, 524, 6, 23, 11, 0, 524, 525, 6, 23, 12, 0, 525, 58, 1, 0, 0, 0, 526, 527, 3, 53, 21, 0, 527, 528, 1, 0, 0, 0, 528, 529, 6, 24, 8, 0, 529, 60, 1, 0, 0, 0, 530, 531, 3, 49, 19, 0, 531, 532, 1, 0, 0, 0, 532, 533, 6, 25, 8, 0, 533, 62, 1, 0, 0, 0, 534, 535, 3, 51, 20, 0, 535, 536, 1, 0, 0, 0, 536, 537, 6, 26, 8, 0, 537, 64, 1, 0, 0, 0, 538, 539, 5, 124, 0, 0, 539, 540, 1, 0, 0, 0, 540, 541, 6, 27, 12, 0, 541, 66, 1, 0, 0, 0, 542, 543, 7, 3, 0, 0, 543, 68, 1, 0, 0, 0, 544, 545, 7, 4, 0, 0, 545, 70, 1, 0, 0, 0, 546, 547, 5, 92, 0, 0, 547, 548, 7, 5, 0, 0, 548, 72, 1, 0, 0, 0, 549, 550, 8, 6, 0, 0, 550, 74, 1, 0, 0, 0, 551, 553, 7, 7, 0, 0, 552, 554, 7, 8, 0, 0, 553, 552, 1, 0, 0, 0, 553, 554, 1, 0, 0, 0, 554, 556, 1, 0, 0, 0, 555, 557, 3, 67, 28, 0, 556, 555, 1, 0, 0, 0, 557, 558, 1, 0, 0, 0, 558, 556, 1, 0, 0, 0, 558, 559, 1, 0, 0, 0, 559, 76, 1, 0, 0, 0, 560, 561, 5, 64, 0, 0, 561, 78, 1, 0, 0, 0, 562, 563, 5, 96, 0, 0, 563, 80, 1, 0, 0, 0, 564, 568, 8, 9, 0, 0, 565, 566, 5, 96, 0, 0, 566, 568, 5, 96, 0, 0, 567, 564, 1, 0, 0, 0, 567, 565, 1, 0, 0, 0, 568, 82, 1, 0, 0, 0, 569, 570, 5, 95, 0, 0, 570, 84, 1, 0, 0, 0, 571, 575, 3, 69, 29, 0, 572, 575, 3, 67, 28, 0, 573, 575, 3, 83, 36, 0, 574, 571, 1, 0, 0, 0, 574, 572, 1, 0, 0, 0, 574, 573, 1, 0, 0, 0, 575, 86, 1, 0, 0, 0, 576, 581, 5, 34, 0, 0, 577, 580, 3, 71, 30, 0, 578, 580, 3, 73, 31, 0, 579, 577, 1, 0, 0, 0, 579, 578, 1, 0, 0, 0, 580, 583, 1, 0, 0, 0, 581, 579, 1, 0, 0, 0, 581, 582, 1, 0, 0, 0, 582, 584, 1, 0, 0, 0, 583, 581, 1, 0, 0, 0, 584, 606, 5, 34, 0, 0, 585, 586, 5, 34, 0, 
0, 586, 587, 5, 34, 0, 0, 587, 588, 5, 34, 0, 0, 588, 592, 1, 0, 0, 0, 589, 591, 8, 1, 0, 0, 590, 589, 1, 0, 0, 0, 591, 594, 1, 0, 0, 0, 592, 593, 1, 0, 0, 0, 592, 590, 1, 0, 0, 0, 593, 595, 1, 0, 0, 0, 594, 592, 1, 0, 0, 0, 595, 596, 5, 34, 0, 0, 596, 597, 5, 34, 0, 0, 597, 598, 5, 34, 0, 0, 598, 600, 1, 0, 0, 0, 599, 601, 5, 34, 0, 0, 600, 599, 1, 0, 0, 0, 600, 601, 1, 0, 0, 0, 601, 603, 1, 0, 0, 0, 602, 604, 5, 34, 0, 0, 603, 602, 1, 0, 0, 0, 603, 604, 1, 0, 0, 0, 604, 606, 1, 0, 0, 0, 605, 576, 1, 0, 0, 0, 605, 585, 1, 0, 0, 0, 606, 88, 1, 0, 0, 0, 607, 609, 3, 67, 28, 0, 608, 607, 1, 0, 0, 0, 609, 610, 1, 0, 0, 0, 610, 608, 1, 0, 0, 0, 610, 611, 1, 0, 0, 0, 611, 90, 1, 0, 0, 0, 612, 614, 3, 67, 28, 0, 613, 612, 1, 0, 0, 0, 614, 615, 1, 0, 0, 0, 615, 613, 1, 0, 0, 0, 615, 616, 1, 0, 0, 0, 616, 617, 1, 0, 0, 0, 617, 621, 3, 105, 47, 0, 618, 620, 3, 67, 28, 0, 619, 618, 1, 0, 0, 0, 620, 623, 1, 0, 0, 0, 621, 619, 1, 0, 0, 0, 621, 622, 1, 0, 0, 0, 622, 655, 1, 0, 0, 0, 623, 621, 1, 0, 0, 0, 624, 626, 3, 105, 47, 0, 625, 627, 3, 67, 28, 0, 626, 625, 1, 0, 0, 0, 627, 628, 1, 0, 0, 0, 628, 626, 1, 0, 0, 0, 628, 629, 1, 0, 0, 0, 629, 655, 1, 0, 0, 0, 630, 632, 3, 67, 28, 0, 631, 630, 1, 0, 0, 0, 632, 633, 1, 0, 0, 0, 633, 631, 1, 0, 0, 0, 633, 634, 1, 0, 0, 0, 634, 642, 1, 0, 0, 0, 635, 639, 3, 105, 47, 0, 636, 638, 3, 67, 28, 0, 637, 636, 1, 0, 0, 0, 638, 641, 1, 0, 0, 0, 639, 637, 1, 0, 0, 0, 639, 640, 1, 0, 0, 0, 640, 643, 1, 0, 0, 0, 641, 639, 1, 0, 0, 0, 642, 635, 1, 0, 0, 0, 642, 643, 1, 0, 0, 0, 643, 644, 1, 0, 0, 0, 644, 645, 3, 75, 32, 0, 645, 655, 1, 0, 0, 0, 646, 648, 3, 105, 47, 0, 647, 649, 3, 67, 28, 0, 648, 647, 1, 0, 0, 0, 649, 650, 1, 0, 0, 0, 650, 648, 1, 0, 0, 0, 650, 651, 1, 0, 0, 0, 651, 652, 1, 0, 0, 0, 652, 653, 3, 75, 32, 0, 653, 655, 1, 0, 0, 0, 654, 613, 1, 0, 0, 0, 654, 624, 1, 0, 0, 0, 654, 631, 1, 0, 0, 0, 654, 646, 1, 0, 0, 0, 655, 92, 1, 0, 0, 0, 656, 657, 5, 98, 0, 0, 657, 658, 5, 121, 0, 0, 658, 94, 1, 0, 0, 0, 659, 660, 5, 97, 0, 0, 660, 661, 5, 110, 0, 0, 661, 662, 5, 100, 0, 0, 662, 96, 1, 0, 0, 0, 663, 664, 5, 97, 0, 0, 664, 665, 5, 115, 0, 0, 665, 666, 5, 99, 0, 0, 666, 98, 1, 0, 0, 0, 667, 668, 5, 61, 0, 0, 668, 100, 1, 0, 0, 0, 669, 670, 5, 44, 0, 0, 670, 102, 1, 0, 0, 0, 671, 672, 5, 100, 0, 0, 672, 673, 5, 101, 0, 0, 673, 674, 5, 115, 0, 0, 674, 675, 5, 99, 0, 0, 675, 104, 1, 0, 0, 0, 676, 677, 5, 46, 0, 0, 677, 106, 1, 0, 0, 0, 678, 679, 5, 102, 0, 0, 679, 680, 5, 97, 0, 0, 680, 681, 5, 108, 0, 0, 681, 682, 5, 115, 0, 0, 682, 683, 5, 101, 0, 0, 683, 108, 1, 0, 0, 0, 684, 685, 5, 102, 0, 0, 685, 686, 5, 105, 0, 0, 686, 687, 5, 114, 0, 0, 687, 688, 5, 115, 0, 0, 688, 689, 5, 116, 0, 0, 689, 110, 1, 0, 0, 0, 690, 691, 5, 108, 0, 0, 691, 692, 5, 97, 0, 0, 692, 693, 5, 115, 0, 0, 693, 694, 5, 116, 0, 0, 694, 112, 1, 0, 0, 0, 695, 696, 5, 40, 0, 0, 696, 114, 1, 0, 0, 0, 697, 698, 5, 105, 0, 0, 698, 699, 5, 110, 0, 0, 699, 116, 1, 0, 0, 0, 700, 701, 5, 105, 0, 0, 701, 702, 5, 115, 0, 0, 702, 118, 1, 0, 0, 0, 703, 704, 5, 108, 0, 0, 704, 705, 5, 105, 0, 0, 705, 706, 5, 107, 0, 0, 706, 707, 5, 101, 0, 0, 707, 120, 1, 0, 0, 0, 708, 709, 5, 110, 0, 0, 709, 710, 5, 111, 0, 0, 710, 711, 5, 116, 0, 0, 711, 122, 1, 0, 0, 0, 712, 713, 5, 110, 0, 0, 713, 714, 5, 117, 0, 0, 714, 715, 5, 108, 0, 0, 715, 716, 5, 108, 0, 0, 716, 124, 1, 0, 0, 0, 717, 718, 5, 110, 0, 0, 718, 719, 5, 117, 0, 0, 719, 720, 5, 108, 0, 0, 720, 721, 5, 108, 0, 0, 721, 722, 5, 115, 0, 0, 722, 126, 1, 0, 0, 0, 723, 724, 5, 111, 0, 0, 724, 725, 5, 114, 0, 0, 725, 128, 1, 0, 0, 0, 726, 727, 5, 63, 
0, 0, 727, 130, 1, 0, 0, 0, 728, 729, 5, 114, 0, 0, 729, 730, 5, 108, 0, 0, 730, 731, 5, 105, 0, 0, 731, 732, 5, 107, 0, 0, 732, 733, 5, 101, 0, 0, 733, 132, 1, 0, 0, 0, 734, 735, 5, 41, 0, 0, 735, 134, 1, 0, 0, 0, 736, 737, 5, 116, 0, 0, 737, 738, 5, 114, 0, 0, 738, 739, 5, 117, 0, 0, 739, 740, 5, 101, 0, 0, 740, 136, 1, 0, 0, 0, 741, 742, 5, 61, 0, 0, 742, 743, 5, 61, 0, 0, 743, 138, 1, 0, 0, 0, 744, 745, 5, 33, 0, 0, 745, 746, 5, 61, 0, 0, 746, 140, 1, 0, 0, 0, 747, 748, 5, 60, 0, 0, 748, 142, 1, 0, 0, 0, 749, 750, 5, 60, 0, 0, 750, 751, 5, 61, 0, 0, 751, 144, 1, 0, 0, 0, 752, 753, 5, 62, 0, 0, 753, 146, 1, 0, 0, 0, 754, 755, 5, 62, 0, 0, 755, 756, 5, 61, 0, 0, 756, 148, 1, 0, 0, 0, 757, 758, 5, 43, 0, 0, 758, 150, 1, 0, 0, 0, 759, 760, 5, 45, 0, 0, 760, 152, 1, 0, 0, 0, 761, 762, 5, 42, 0, 0, 762, 154, 1, 0, 0, 0, 763, 764, 5, 47, 0, 0, 764, 156, 1, 0, 0, 0, 765, 766, 5, 37, 0, 0, 766, 158, 1, 0, 0, 0, 767, 768, 5, 91, 0, 0, 768, 769, 1, 0, 0, 0, 769, 770, 6, 74, 0, 0, 770, 771, 6, 74, 0, 0, 771, 160, 1, 0, 0, 0, 772, 773, 5, 93, 0, 0, 773, 774, 1, 0, 0, 0, 774, 775, 6, 75, 12, 0, 775, 776, 6, 75, 12, 0, 776, 162, 1, 0, 0, 0, 777, 781, 3, 69, 29, 0, 778, 780, 3, 85, 37, 0, 779, 778, 1, 0, 0, 0, 780, 783, 1, 0, 0, 0, 781, 779, 1, 0, 0, 0, 781, 782, 1, 0, 0, 0, 782, 794, 1, 0, 0, 0, 783, 781, 1, 0, 0, 0, 784, 787, 3, 83, 36, 0, 785, 787, 3, 77, 33, 0, 786, 784, 1, 0, 0, 0, 786, 785, 1, 0, 0, 0, 787, 789, 1, 0, 0, 0, 788, 790, 3, 85, 37, 0, 789, 788, 1, 0, 0, 0, 790, 791, 1, 0, 0, 0, 791, 789, 1, 0, 0, 0, 791, 792, 1, 0, 0, 0, 792, 794, 1, 0, 0, 0, 793, 777, 1, 0, 0, 0, 793, 786, 1, 0, 0, 0, 794, 164, 1, 0, 0, 0, 795, 797, 3, 79, 34, 0, 796, 798, 3, 81, 35, 0, 797, 796, 1, 0, 0, 0, 798, 799, 1, 0, 0, 0, 799, 797, 1, 0, 0, 0, 799, 800, 1, 0, 0, 0, 800, 801, 1, 0, 0, 0, 801, 802, 3, 79, 34, 0, 802, 166, 1, 0, 0, 0, 803, 804, 3, 49, 19, 0, 804, 805, 1, 0, 0, 0, 805, 806, 6, 78, 8, 0, 806, 168, 1, 0, 0, 0, 807, 808, 3, 51, 20, 0, 808, 809, 1, 0, 0, 0, 809, 810, 6, 79, 8, 0, 810, 170, 1, 0, 0, 0, 811, 812, 3, 53, 21, 0, 812, 813, 1, 0, 0, 0, 813, 814, 6, 80, 8, 0, 814, 172, 1, 0, 0, 0, 815, 816, 3, 65, 27, 0, 816, 817, 1, 0, 0, 0, 817, 818, 6, 81, 11, 0, 818, 819, 6, 81, 12, 0, 819, 174, 1, 0, 0, 0, 820, 821, 3, 159, 74, 0, 821, 822, 1, 0, 0, 0, 822, 823, 6, 82, 9, 0, 823, 176, 1, 0, 0, 0, 824, 825, 3, 161, 75, 0, 825, 826, 1, 0, 0, 0, 826, 827, 6, 83, 13, 0, 827, 178, 1, 0, 0, 0, 828, 829, 3, 101, 45, 0, 829, 830, 1, 0, 0, 0, 830, 831, 6, 84, 14, 0, 831, 180, 1, 0, 0, 0, 832, 833, 3, 99, 44, 0, 833, 834, 1, 0, 0, 0, 834, 835, 6, 85, 15, 0, 835, 182, 1, 0, 0, 0, 836, 837, 5, 109, 0, 0, 837, 838, 5, 101, 0, 0, 838, 839, 5, 116, 0, 0, 839, 840, 5, 97, 0, 0, 840, 841, 5, 100, 0, 0, 841, 842, 5, 97, 0, 0, 842, 843, 5, 116, 0, 0, 843, 844, 5, 97, 0, 0, 844, 184, 1, 0, 0, 0, 845, 849, 8, 10, 0, 0, 846, 847, 5, 47, 0, 0, 847, 849, 8, 11, 0, 0, 848, 845, 1, 0, 0, 0, 848, 846, 1, 0, 0, 0, 849, 186, 1, 0, 0, 0, 850, 852, 3, 185, 87, 0, 851, 850, 1, 0, 0, 0, 852, 853, 1, 0, 0, 0, 853, 851, 1, 0, 0, 0, 853, 854, 1, 0, 0, 0, 854, 188, 1, 0, 0, 0, 855, 856, 3, 165, 77, 0, 856, 857, 1, 0, 0, 0, 857, 858, 6, 89, 16, 0, 858, 190, 1, 0, 0, 0, 859, 860, 3, 49, 19, 0, 860, 861, 1, 0, 0, 0, 861, 862, 6, 90, 8, 0, 862, 192, 1, 0, 0, 0, 863, 864, 3, 51, 20, 0, 864, 865, 1, 0, 0, 0, 865, 866, 6, 91, 8, 0, 866, 194, 1, 0, 0, 0, 867, 868, 3, 53, 21, 0, 868, 869, 1, 0, 0, 0, 869, 870, 6, 92, 8, 0, 870, 196, 1, 0, 0, 0, 871, 872, 3, 65, 27, 0, 872, 873, 1, 0, 0, 0, 873, 874, 6, 93, 11, 0, 874, 875, 6, 93, 12, 0, 875, 
198, 1, 0, 0, 0, 876, 877, 3, 105, 47, 0, 877, 878, 1, 0, 0, 0, 878, 879, 6, 94, 17, 0, 879, 200, 1, 0, 0, 0, 880, 881, 3, 101, 45, 0, 881, 882, 1, 0, 0, 0, 882, 883, 6, 95, 14, 0, 883, 202, 1, 0, 0, 0, 884, 889, 3, 69, 29, 0, 885, 889, 3, 67, 28, 0, 886, 889, 3, 83, 36, 0, 887, 889, 3, 153, 71, 0, 888, 884, 1, 0, 0, 0, 888, 885, 1, 0, 0, 0, 888, 886, 1, 0, 0, 0, 888, 887, 1, 0, 0, 0, 889, 204, 1, 0, 0, 0, 890, 893, 3, 69, 29, 0, 891, 893, 3, 153, 71, 0, 892, 890, 1, 0, 0, 0, 892, 891, 1, 0, 0, 0, 893, 897, 1, 0, 0, 0, 894, 896, 3, 203, 96, 0, 895, 894, 1, 0, 0, 0, 896, 899, 1, 0, 0, 0, 897, 895, 1, 0, 0, 0, 897, 898, 1, 0, 0, 0, 898, 910, 1, 0, 0, 0, 899, 897, 1, 0, 0, 0, 900, 903, 3, 83, 36, 0, 901, 903, 3, 77, 33, 0, 902, 900, 1, 0, 0, 0, 902, 901, 1, 0, 0, 0, 903, 905, 1, 0, 0, 0, 904, 906, 3, 203, 96, 0, 905, 904, 1, 0, 0, 0, 906, 907, 1, 0, 0, 0, 907, 905, 1, 0, 0, 0, 907, 908, 1, 0, 0, 0, 908, 910, 1, 0, 0, 0, 909, 892, 1, 0, 0, 0, 909, 902, 1, 0, 0, 0, 910, 206, 1, 0, 0, 0, 911, 912, 3, 205, 97, 0, 912, 913, 1, 0, 0, 0, 913, 914, 6, 98, 18, 0, 914, 208, 1, 0, 0, 0, 915, 916, 3, 165, 77, 0, 916, 917, 1, 0, 0, 0, 917, 918, 6, 99, 16, 0, 918, 210, 1, 0, 0, 0, 919, 920, 3, 49, 19, 0, 920, 921, 1, 0, 0, 0, 921, 922, 6, 100, 8, 0, 922, 212, 1, 0, 0, 0, 923, 924, 3, 51, 20, 0, 924, 925, 1, 0, 0, 0, 925, 926, 6, 101, 8, 0, 926, 214, 1, 0, 0, 0, 927, 928, 3, 53, 21, 0, 928, 929, 1, 0, 0, 0, 929, 930, 6, 102, 8, 0, 930, 216, 1, 0, 0, 0, 931, 932, 3, 65, 27, 0, 932, 933, 1, 0, 0, 0, 933, 934, 6, 103, 11, 0, 934, 935, 6, 103, 12, 0, 935, 218, 1, 0, 0, 0, 936, 937, 3, 99, 44, 0, 937, 938, 1, 0, 0, 0, 938, 939, 6, 104, 15, 0, 939, 220, 1, 0, 0, 0, 940, 941, 3, 101, 45, 0, 941, 942, 1, 0, 0, 0, 942, 943, 6, 105, 14, 0, 943, 222, 1, 0, 0, 0, 944, 945, 3, 105, 47, 0, 945, 946, 1, 0, 0, 0, 946, 947, 6, 106, 17, 0, 947, 224, 1, 0, 0, 0, 948, 949, 5, 97, 0, 0, 949, 950, 5, 115, 0, 0, 950, 226, 1, 0, 0, 0, 951, 952, 3, 165, 77, 0, 952, 953, 1, 0, 0, 0, 953, 954, 6, 108, 16, 0, 954, 228, 1, 0, 0, 0, 955, 956, 3, 205, 97, 0, 956, 957, 1, 0, 0, 0, 957, 958, 6, 109, 18, 0, 958, 230, 1, 0, 0, 0, 959, 960, 3, 49, 19, 0, 960, 961, 1, 0, 0, 0, 961, 962, 6, 110, 8, 0, 962, 232, 1, 0, 0, 0, 963, 964, 3, 51, 20, 0, 964, 965, 1, 0, 0, 0, 965, 966, 6, 111, 8, 0, 966, 234, 1, 0, 0, 0, 967, 968, 3, 53, 21, 0, 968, 969, 1, 0, 0, 0, 969, 970, 6, 112, 8, 0, 970, 236, 1, 0, 0, 0, 971, 972, 3, 65, 27, 0, 972, 973, 1, 0, 0, 0, 973, 974, 6, 113, 11, 0, 974, 975, 6, 113, 12, 0, 975, 238, 1, 0, 0, 0, 976, 977, 3, 159, 74, 0, 977, 978, 1, 0, 0, 0, 978, 979, 6, 114, 9, 0, 979, 980, 6, 114, 19, 0, 980, 240, 1, 0, 0, 0, 981, 982, 5, 111, 0, 0, 982, 983, 5, 110, 0, 0, 983, 984, 1, 0, 0, 0, 984, 985, 6, 115, 20, 0, 985, 242, 1, 0, 0, 0, 986, 987, 5, 119, 0, 0, 987, 988, 5, 105, 0, 0, 988, 989, 5, 116, 0, 0, 989, 990, 5, 104, 0, 0, 990, 991, 1, 0, 0, 0, 991, 992, 6, 116, 20, 0, 992, 244, 1, 0, 0, 0, 993, 994, 8, 12, 0, 0, 994, 246, 1, 0, 0, 0, 995, 998, 3, 69, 29, 0, 996, 998, 3, 67, 28, 0, 997, 995, 1, 0, 0, 0, 997, 996, 1, 0, 0, 0, 998, 1002, 1, 0, 0, 0, 999, 1001, 3, 245, 117, 0, 1000, 999, 1, 0, 0, 0, 1001, 1004, 1, 0, 0, 0, 1002, 1000, 1, 0, 0, 0, 1002, 1003, 1, 0, 0, 0, 1003, 248, 1, 0, 0, 0, 1004, 1002, 1, 0, 0, 0, 1005, 1006, 3, 165, 77, 0, 1006, 1007, 1, 0, 0, 0, 1007, 1008, 6, 119, 16, 0, 1008, 250, 1, 0, 0, 0, 1009, 1010, 3, 247, 118, 0, 1010, 1011, 1, 0, 0, 0, 1011, 1012, 6, 120, 21, 0, 1012, 252, 1, 0, 0, 0, 1013, 1014, 3, 49, 19, 0, 1014, 1015, 1, 0, 0, 0, 1015, 1016, 6, 121, 8, 0, 1016, 254, 1, 0, 0, 0, 1017, 1018, 3, 
51, 20, 0, 1018, 1019, 1, 0, 0, 0, 1019, 1020, 6, 122, 8, 0, 1020, 256, 1, 0, 0, 0, 1021, 1022, 3, 53, 21, 0, 1022, 1023, 1, 0, 0, 0, 1023, 1024, 6, 123, 8, 0, 1024, 258, 1, 0, 0, 0, 1025, 1026, 3, 65, 27, 0, 1026, 1027, 1, 0, 0, 0, 1027, 1028, 6, 124, 11, 0, 1028, 1029, 6, 124, 12, 0, 1029, 1030, 6, 124, 12, 0, 1030, 260, 1, 0, 0, 0, 1031, 1032, 3, 99, 44, 0, 1032, 1033, 1, 0, 0, 0, 1033, 1034, 6, 125, 15, 0, 1034, 262, 1, 0, 0, 0, 1035, 1036, 3, 101, 45, 0, 1036, 1037, 1, 0, 0, 0, 1037, 1038, 6, 126, 14, 0, 1038, 264, 1, 0, 0, 0, 1039, 1040, 3, 105, 47, 0, 1040, 1041, 1, 0, 0, 0, 1041, 1042, 6, 127, 17, 0, 1042, 266, 1, 0, 0, 0, 1043, 1044, 3, 243, 116, 0, 1044, 1045, 1, 0, 0, 0, 1045, 1046, 6, 128, 22, 0, 1046, 268, 1, 0, 0, 0, 1047, 1048, 3, 205, 97, 0, 1048, 1049, 1, 0, 0, 0, 1049, 1050, 6, 129, 18, 0, 1050, 270, 1, 0, 0, 0, 1051, 1052, 3, 165, 77, 0, 1052, 1053, 1, 0, 0, 0, 1053, 1054, 6, 130, 16, 0, 1054, 272, 1, 0, 0, 0, 1055, 1056, 3, 49, 19, 0, 1056, 1057, 1, 0, 0, 0, 1057, 1058, 6, 131, 8, 0, 1058, 274, 1, 0, 0, 0, 1059, 1060, 3, 51, 20, 0, 1060, 1061, 1, 0, 0, 0, 1061, 1062, 6, 132, 8, 0, 1062, 276, 1, 0, 0, 0, 1063, 1064, 3, 53, 21, 0, 1064, 1065, 1, 0, 0, 0, 1065, 1066, 6, 133, 8, 0, 1066, 278, 1, 0, 0, 0, 1067, 1068, 3, 65, 27, 0, 1068, 1069, 1, 0, 0, 0, 1069, 1070, 6, 134, 11, 0, 1070, 1071, 6, 134, 12, 0, 1071, 280, 1, 0, 0, 0, 1072, 1073, 3, 105, 47, 0, 1073, 1074, 1, 0, 0, 0, 1074, 1075, 6, 135, 17, 0, 1075, 282, 1, 0, 0, 0, 1076, 1077, 3, 165, 77, 0, 1077, 1078, 1, 0, 0, 0, 1078, 1079, 6, 136, 16, 0, 1079, 284, 1, 0, 0, 0, 1080, 1081, 3, 163, 76, 0, 1081, 1082, 1, 0, 0, 0, 1082, 1083, 6, 137, 23, 0, 1083, 286, 1, 0, 0, 0, 1084, 1085, 3, 49, 19, 0, 1085, 1086, 1, 0, 0, 0, 1086, 1087, 6, 138, 8, 0, 1087, 288, 1, 0, 0, 0, 1088, 1089, 3, 51, 20, 0, 1089, 1090, 1, 0, 0, 0, 1090, 1091, 6, 139, 8, 0, 1091, 290, 1, 0, 0, 0, 1092, 1093, 3, 53, 21, 0, 1093, 1094, 1, 0, 0, 0, 1094, 1095, 6, 140, 8, 0, 1095, 292, 1, 0, 0, 0, 1096, 1097, 3, 65, 27, 0, 1097, 1098, 1, 0, 0, 0, 1098, 1099, 6, 141, 11, 0, 1099, 1100, 6, 141, 12, 0, 1100, 294, 1, 0, 0, 0, 1101, 1102, 5, 105, 0, 0, 1102, 1103, 5, 110, 0, 0, 1103, 1104, 5, 102, 0, 0, 1104, 1105, 5, 111, 0, 0, 1105, 296, 1, 0, 0, 0, 1106, 1107, 5, 102, 0, 0, 1107, 1108, 5, 117, 0, 0, 1108, 1109, 5, 110, 0, 0, 1109, 1110, 5, 99, 0, 0, 1110, 1111, 5, 116, 0, 0, 1111, 1112, 5, 105, 0, 0, 1112, 1113, 5, 111, 0, 0, 1113, 1114, 5, 110, 0, 0, 1114, 1115, 5, 115, 0, 0, 1115, 298, 1, 0, 0, 0, 1116, 1117, 3, 49, 19, 0, 1117, 1118, 1, 0, 0, 0, 1118, 1119, 6, 144, 8, 0, 1119, 300, 1, 0, 0, 0, 1120, 1121, 3, 51, 20, 0, 1121, 1122, 1, 0, 0, 0, 1122, 1123, 6, 145, 8, 0, 1123, 302, 1, 0, 0, 0, 1124, 1125, 3, 53, 21, 0, 1125, 1126, 1, 0, 0, 0, 1126, 1127, 6, 146, 8, 0, 1127, 304, 1, 0, 0, 0, 1128, 1129, 3, 161, 75, 0, 1129, 1130, 1, 0, 0, 0, 1130, 1131, 6, 147, 13, 0, 1131, 1132, 6, 147, 12, 0, 1132, 306, 1, 0, 0, 0, 1133, 1134, 5, 58, 0, 0, 1134, 308, 1, 0, 0, 0, 1135, 1141, 3, 77, 33, 0, 1136, 1141, 3, 67, 28, 0, 1137, 1141, 3, 105, 47, 0, 1138, 1141, 3, 69, 29, 0, 1139, 1141, 3, 83, 36, 0, 1140, 1135, 1, 0, 0, 0, 1140, 1136, 1, 0, 0, 0, 1140, 1137, 1, 0, 0, 0, 1140, 1138, 1, 0, 0, 0, 1140, 1139, 1, 0, 0, 0, 1141, 1142, 1, 0, 0, 0, 1142, 1140, 1, 0, 0, 0, 1142, 1143, 1, 0, 0, 0, 1143, 310, 1, 0, 0, 0, 1144, 1145, 3, 49, 19, 0, 1145, 1146, 1, 0, 0, 0, 1146, 1147, 6, 150, 8, 0, 1147, 312, 1, 0, 0, 0, 1148, 1149, 3, 51, 20, 0, 1149, 1150, 1, 0, 0, 0, 1150, 1151, 6, 151, 8, 0, 1151, 314, 1, 0, 0, 0, 1152, 1153, 3, 53, 21, 0, 1153, 1154, 1, 0, 0, 0, 1154, 
1155, 6, 152, 8, 0, 1155, 316, 1, 0, 0, 0, 54, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 473, 483, 487, 490, 499, 501, 512, 553, 558, 567, 574, 579, 581, 592, 600, 603, 605, 610, 615, 621, 628, 633, 639, 642, 650, 654, 781, 786, 791, 793, 799, 848, 853, 888, 892, 897, 902, 907, 909, 997, 1002, 1140, 1142, 24, 5, 2, 0, 5, 4, 0, 5, 6, 0, 5, 1, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 0, 1, 0, 7, 63, 0, 5, 0, 0, 7, 26, 0, 4, 0, 0, 7, 64, 0, 7, 34, 0, 7, 33, 0, 7, 66, 0, 7, 36, 0, 7, 75, 0, 5, 10, 0, 5, 7, 0, 7, 85, 0, 7, 84, 0, 7, 65, 0] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java index 8946172327bcc..e1902fbae4b5a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java @@ -28,24 +28,26 @@ public class EsqlBaseLexer extends Lexer { PLUS=58, MINUS=59, ASTERISK=60, SLASH=61, PERCENT=62, OPENING_BRACKET=63, CLOSING_BRACKET=64, UNQUOTED_IDENTIFIER=65, QUOTED_IDENTIFIER=66, EXPR_LINE_COMMENT=67, EXPR_MULTILINE_COMMENT=68, EXPR_WS=69, METADATA=70, FROM_UNQUOTED_IDENTIFIER=71, - FROM_LINE_COMMENT=72, FROM_MULTILINE_COMMENT=73, FROM_WS=74, PROJECT_UNQUOTED_IDENTIFIER=75, + FROM_LINE_COMMENT=72, FROM_MULTILINE_COMMENT=73, FROM_WS=74, UNQUOTED_ID_PATTERN=75, PROJECT_LINE_COMMENT=76, PROJECT_MULTILINE_COMMENT=77, PROJECT_WS=78, AS=79, RENAME_LINE_COMMENT=80, RENAME_MULTILINE_COMMENT=81, RENAME_WS=82, - ON=83, WITH=84, ENRICH_LINE_COMMENT=85, ENRICH_MULTILINE_COMMENT=86, ENRICH_WS=87, - ENRICH_FIELD_LINE_COMMENT=88, ENRICH_FIELD_MULTILINE_COMMENT=89, ENRICH_FIELD_WS=90, - MVEXPAND_LINE_COMMENT=91, MVEXPAND_MULTILINE_COMMENT=92, MVEXPAND_WS=93, - INFO=94, FUNCTIONS=95, SHOW_LINE_COMMENT=96, SHOW_MULTILINE_COMMENT=97, - SHOW_WS=98; + ON=83, WITH=84, ENRICH_POLICY_NAME=85, ENRICH_LINE_COMMENT=86, ENRICH_MULTILINE_COMMENT=87, + ENRICH_WS=88, ENRICH_FIELD_LINE_COMMENT=89, ENRICH_FIELD_MULTILINE_COMMENT=90, + ENRICH_FIELD_WS=91, MVEXPAND_LINE_COMMENT=92, MVEXPAND_MULTILINE_COMMENT=93, + MVEXPAND_WS=94, INFO=95, FUNCTIONS=96, SHOW_LINE_COMMENT=97, SHOW_MULTILINE_COMMENT=98, + SHOW_WS=99, COLON=100, SETTING=101, SETTING_LINE_COMMENT=102, SETTTING_MULTILINE_COMMENT=103, + SETTING_WS=104; public static final int EXPLAIN_MODE=1, EXPRESSION_MODE=2, FROM_MODE=3, PROJECT_MODE=4, RENAME_MODE=5, - ENRICH_MODE=6, ENRICH_FIELD_MODE=7, MVEXPAND_MODE=8, SHOW_MODE=9; + ENRICH_MODE=6, ENRICH_FIELD_MODE=7, MVEXPAND_MODE=8, SHOW_MODE=9, SETTING_MODE=10; public static String[] channelNames = { "DEFAULT_TOKEN_CHANNEL", "HIDDEN" }; public static String[] modeNames = { "DEFAULT_MODE", "EXPLAIN_MODE", "EXPRESSION_MODE", "FROM_MODE", "PROJECT_MODE", - "RENAME_MODE", "ENRICH_MODE", "ENRICH_FIELD_MODE", "MVEXPAND_MODE", "SHOW_MODE" + "RENAME_MODE", "ENRICH_MODE", "ENRICH_FIELD_MODE", "MVEXPAND_MODE", "SHOW_MODE", + "SETTING_MODE" }; private static String[] makeRuleNames() { @@ -66,19 +68,21 @@ private static String[] makeRuleNames() { "FROM_COMMA", "FROM_ASSIGN", "METADATA", "FROM_UNQUOTED_IDENTIFIER_PART", "FROM_UNQUOTED_IDENTIFIER", "FROM_QUOTED_IDENTIFIER", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", - "UNQUOTED_ID_BODY_WITH_PATTERN", "PROJECT_UNQUOTED_IDENTIFIER", "PROJECT_QUOTED_IDENTIFIER", - "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", 
"RENAME_PIPE", - "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT", "AS", "RENAME_QUOTED_IDENTIFIER", - "RENAME_UNQUOTED_IDENTIFIER", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", - "RENAME_WS", "ENRICH_PIPE", "ON", "WITH", "ENRICH_POLICY_UNQUOTED_IDENTIFIER", - "ENRICH_QUOTED_IDENTIFIER", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", + "UNQUOTED_ID_BODY_WITH_PATTERN", "UNQUOTED_ID_PATTERN", "PROJECT_UNQUOTED_IDENTIFIER", + "PROJECT_QUOTED_IDENTIFIER", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", + "PROJECT_WS", "RENAME_PIPE", "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT", + "AS", "RENAME_QUOTED_IDENTIFIER", "RENAME_UNQUOTED_IDENTIFIER", "RENAME_LINE_COMMENT", + "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ENRICH_PIPE", "ENRICH_OPENING_BRACKET", + "ON", "WITH", "ENRICH_POLICY_NAME_BODY", "ENRICH_POLICY_NAME", "ENRICH_QUOTED_IDENTIFIER", + "ENRICH_MODE_UNQUOTED_VALUE", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_PIPE", "ENRICH_FIELD_ASSIGN", "ENRICH_FIELD_COMMA", "ENRICH_FIELD_DOT", "ENRICH_FIELD_WITH", "ENRICH_FIELD_UNQUOTED_IDENTIFIER", "ENRICH_FIELD_QUOTED_IDENTIFIER", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_PIPE", "MVEXPAND_DOT", "MVEXPAND_QUOTED_IDENTIFIER", "MVEXPAND_UNQUOTED_IDENTIFIER", "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "SHOW_PIPE", "INFO", "FUNCTIONS", "SHOW_LINE_COMMENT", - "SHOW_MULTILINE_COMMENT", "SHOW_WS" + "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING_CLOSING_BRACKET", "COLON", + "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", "SETTING_WS" }; } public static final String[] ruleNames = makeRuleNames(); @@ -95,7 +99,8 @@ private static String[] makeLiteralNames() { "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, "']'", null, null, null, null, null, "'metadata'", null, null, null, null, null, null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, null, - null, null, null, null, null, null, "'info'", "'functions'" + null, null, null, null, null, null, null, "'info'", "'functions'", null, + null, null, "':'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); @@ -112,13 +117,14 @@ private static String[] makeSymbolicNames() { "SLASH", "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "METADATA", "FROM_UNQUOTED_IDENTIFIER", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", - "FROM_WS", "PROJECT_UNQUOTED_IDENTIFIER", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", + "FROM_WS", "UNQUOTED_ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", - "RENAME_WS", "ON", "WITH", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", - "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", - "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", - "MVEXPAND_WS", "INFO", "FUNCTIONS", "SHOW_LINE_COMMENT", "SHOW_MULTILINE_COMMENT", - "SHOW_WS" + "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", + "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", + "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", + "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "FUNCTIONS", "SHOW_LINE_COMMENT", + "SHOW_MULTILINE_COMMENT", "SHOW_WS", "COLON", "SETTING", "SETTING_LINE_COMMENT", + "SETTTING_MULTILINE_COMMENT", "SETTING_WS" }; } 
private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames(); @@ -181,683 +187,731 @@ public EsqlBaseLexer(CharStream input) { public ATN getATN() { return _ATN; } public static final String _serializedATN = - "\u0004\u0000b\u0442\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ + "\u0004\u0000h\u0484\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ - "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0002\u0000\u0007"+ - "\u0000\u0002\u0001\u0007\u0001\u0002\u0002\u0007\u0002\u0002\u0003\u0007"+ - "\u0003\u0002\u0004\u0007\u0004\u0002\u0005\u0007\u0005\u0002\u0006\u0007"+ - "\u0006\u0002\u0007\u0007\u0007\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n"+ - "\u0007\n\u0002\u000b\u0007\u000b\u0002\f\u0007\f\u0002\r\u0007\r\u0002"+ - "\u000e\u0007\u000e\u0002\u000f\u0007\u000f\u0002\u0010\u0007\u0010\u0002"+ - "\u0011\u0007\u0011\u0002\u0012\u0007\u0012\u0002\u0013\u0007\u0013\u0002"+ - "\u0014\u0007\u0014\u0002\u0015\u0007\u0015\u0002\u0016\u0007\u0016\u0002"+ - "\u0017\u0007\u0017\u0002\u0018\u0007\u0018\u0002\u0019\u0007\u0019\u0002"+ - "\u001a\u0007\u001a\u0002\u001b\u0007\u001b\u0002\u001c\u0007\u001c\u0002"+ - "\u001d\u0007\u001d\u0002\u001e\u0007\u001e\u0002\u001f\u0007\u001f\u0002"+ - " \u0007 \u0002!\u0007!\u0002\"\u0007\"\u0002#\u0007#\u0002$\u0007$\u0002"+ - "%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002(\u0007(\u0002)\u0007)\u0002"+ - "*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002-\u0007-\u0002.\u0007.\u0002"+ - "/\u0007/\u00020\u00070\u00021\u00071\u00022\u00072\u00023\u00073\u0002"+ - "4\u00074\u00025\u00075\u00026\u00076\u00027\u00077\u00028\u00078\u0002"+ - "9\u00079\u0002:\u0007:\u0002;\u0007;\u0002<\u0007<\u0002=\u0007=\u0002"+ - ">\u0007>\u0002?\u0007?\u0002@\u0007@\u0002A\u0007A\u0002B\u0007B\u0002"+ - "C\u0007C\u0002D\u0007D\u0002E\u0007E\u0002F\u0007F\u0002G\u0007G\u0002"+ - "H\u0007H\u0002I\u0007I\u0002J\u0007J\u0002K\u0007K\u0002L\u0007L\u0002"+ - "M\u0007M\u0002N\u0007N\u0002O\u0007O\u0002P\u0007P\u0002Q\u0007Q\u0002"+ - "R\u0007R\u0002S\u0007S\u0002T\u0007T\u0002U\u0007U\u0002V\u0007V\u0002"+ - "W\u0007W\u0002X\u0007X\u0002Y\u0007Y\u0002Z\u0007Z\u0002[\u0007[\u0002"+ - "\\\u0007\\\u0002]\u0007]\u0002^\u0007^\u0002_\u0007_\u0002`\u0007`\u0002"+ - "a\u0007a\u0002b\u0007b\u0002c\u0007c\u0002d\u0007d\u0002e\u0007e\u0002"+ - "f\u0007f\u0002g\u0007g\u0002h\u0007h\u0002i\u0007i\u0002j\u0007j\u0002"+ - "k\u0007k\u0002l\u0007l\u0002m\u0007m\u0002n\u0007n\u0002o\u0007o\u0002"+ - "p\u0007p\u0002q\u0007q\u0002r\u0007r\u0002s\u0007s\u0002t\u0007t\u0002"+ - "u\u0007u\u0002v\u0007v\u0002w\u0007w\u0002x\u0007x\u0002y\u0007y\u0002"+ - "z\u0007z\u0002{\u0007{\u0002|\u0007|\u0002}\u0007}\u0002~\u0007~\u0002"+ - "\u007f\u0007\u007f\u0002\u0080\u0007\u0080\u0002\u0081\u0007\u0081\u0002"+ - "\u0082\u0007\u0082\u0002\u0083\u0007\u0083\u0002\u0084\u0007\u0084\u0002"+ - "\u0085\u0007\u0085\u0002\u0086\u0007\u0086\u0002\u0087\u0007\u0087\u0002"+ - "\u0088\u0007\u0088\u0002\u0089\u0007\u0089\u0002\u008a\u0007\u008a\u0002"+ - "\u008b\u0007\u008b\u0002\u008c\u0007\u008c\u0002\u008d\u0007\u008d\u0002"+ - "\u008e\u0007\u008e\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ + "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ + "\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002\u0002\u0007\u0002"+ + "\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002\u0005\u0007\u0005"+ + "\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002\b\u0007\b\u0002"+ + 
"\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002\f\u0007\f\u0002"+ + "\r\u0007\r\u0002\u000e\u0007\u000e\u0002\u000f\u0007\u000f\u0002\u0010"+ + "\u0007\u0010\u0002\u0011\u0007\u0011\u0002\u0012\u0007\u0012\u0002\u0013"+ + "\u0007\u0013\u0002\u0014\u0007\u0014\u0002\u0015\u0007\u0015\u0002\u0016"+ + "\u0007\u0016\u0002\u0017\u0007\u0017\u0002\u0018\u0007\u0018\u0002\u0019"+ + "\u0007\u0019\u0002\u001a\u0007\u001a\u0002\u001b\u0007\u001b\u0002\u001c"+ + "\u0007\u001c\u0002\u001d\u0007\u001d\u0002\u001e\u0007\u001e\u0002\u001f"+ + "\u0007\u001f\u0002 \u0007 \u0002!\u0007!\u0002\"\u0007\"\u0002#\u0007"+ + "#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002(\u0007"+ + "(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002-\u0007"+ + "-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u00022\u0007"+ + "2\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u00076\u00027\u0007"+ + "7\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007;\u0002<\u0007"+ + "<\u0002=\u0007=\u0002>\u0007>\u0002?\u0007?\u0002@\u0007@\u0002A\u0007"+ + "A\u0002B\u0007B\u0002C\u0007C\u0002D\u0007D\u0002E\u0007E\u0002F\u0007"+ + "F\u0002G\u0007G\u0002H\u0007H\u0002I\u0007I\u0002J\u0007J\u0002K\u0007"+ + "K\u0002L\u0007L\u0002M\u0007M\u0002N\u0007N\u0002O\u0007O\u0002P\u0007"+ + "P\u0002Q\u0007Q\u0002R\u0007R\u0002S\u0007S\u0002T\u0007T\u0002U\u0007"+ + "U\u0002V\u0007V\u0002W\u0007W\u0002X\u0007X\u0002Y\u0007Y\u0002Z\u0007"+ + "Z\u0002[\u0007[\u0002\\\u0007\\\u0002]\u0007]\u0002^\u0007^\u0002_\u0007"+ + "_\u0002`\u0007`\u0002a\u0007a\u0002b\u0007b\u0002c\u0007c\u0002d\u0007"+ + "d\u0002e\u0007e\u0002f\u0007f\u0002g\u0007g\u0002h\u0007h\u0002i\u0007"+ + "i\u0002j\u0007j\u0002k\u0007k\u0002l\u0007l\u0002m\u0007m\u0002n\u0007"+ + "n\u0002o\u0007o\u0002p\u0007p\u0002q\u0007q\u0002r\u0007r\u0002s\u0007"+ + "s\u0002t\u0007t\u0002u\u0007u\u0002v\u0007v\u0002w\u0007w\u0002x\u0007"+ + "x\u0002y\u0007y\u0002z\u0007z\u0002{\u0007{\u0002|\u0007|\u0002}\u0007"+ + "}\u0002~\u0007~\u0002\u007f\u0007\u007f\u0002\u0080\u0007\u0080\u0002"+ + "\u0081\u0007\u0081\u0002\u0082\u0007\u0082\u0002\u0083\u0007\u0083\u0002"+ + "\u0084\u0007\u0084\u0002\u0085\u0007\u0085\u0002\u0086\u0007\u0086\u0002"+ + "\u0087\u0007\u0087\u0002\u0088\u0007\u0088\u0002\u0089\u0007\u0089\u0002"+ + "\u008a\u0007\u008a\u0002\u008b\u0007\u008b\u0002\u008c\u0007\u008c\u0002"+ + "\u008d\u0007\u008d\u0002\u008e\u0007\u008e\u0002\u008f\u0007\u008f\u0002"+ + "\u0090\u0007\u0090\u0002\u0091\u0007\u0091\u0002\u0092\u0007\u0092\u0002"+ + "\u0093\u0007\u0093\u0002\u0094\u0007\u0094\u0002\u0095\u0007\u0095\u0002"+ + "\u0096\u0007\u0096\u0002\u0097\u0007\u0097\u0002\u0098\u0007\u0098\u0001"+ "\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ - "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004\u0001"+ + "\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0002\u0001"+ + "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ + "\u0002\u0001\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ "\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ - 
"\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006\u0001"+ - "\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007\u0001"+ - "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ + "\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ + "\u0006\u0001\u0006\u0001\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ - "\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\t"+ - "\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001"+ - "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ - "\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ - "\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001"+ - "\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\r\u0001"+ - "\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e"+ - "\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f"+ - "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010"+ - "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010"+ - "\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011"+ - "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0012\u0004\u0012\u01c3\b\u0012"+ - "\u000b\u0012\f\u0012\u01c4\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013"+ - "\u0001\u0013\u0001\u0013\u0005\u0013\u01cd\b\u0013\n\u0013\f\u0013\u01d0"+ - "\t\u0013\u0001\u0013\u0003\u0013\u01d3\b\u0013\u0001\u0013\u0003\u0013"+ - "\u01d6\b\u0013\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014"+ - "\u0001\u0014\u0001\u0014\u0005\u0014\u01df\b\u0014\n\u0014\f\u0014\u01e2"+ - "\t\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001"+ - "\u0015\u0004\u0015\u01ea\b\u0015\u000b\u0015\f\u0015\u01eb\u0001\u0015"+ - "\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016"+ - "\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018"+ - "\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001\u0019"+ - "\u0001\u0019\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001b"+ - "\u0001\u001b\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001d"+ - "\u0001\u001d\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f"+ - "\u0001 \u0001 \u0003 \u0215\b \u0001 \u0004 \u0218\b \u000b \f \u0219"+ - "\u0001!\u0001!\u0001\"\u0001\"\u0001#\u0001#\u0001#\u0003#\u0223\b#\u0001"+ - "$\u0001$\u0001%\u0001%\u0001%\u0003%\u022a\b%\u0001&\u0001&\u0001&\u0005"+ - "&\u022f\b&\n&\f&\u0232\t&\u0001&\u0001&\u0001&\u0001&\u0001&\u0001&\u0005"+ - "&\u023a\b&\n&\f&\u023d\t&\u0001&\u0001&\u0001&\u0001&\u0001&\u0003&\u0244"+ - "\b&\u0001&\u0003&\u0247\b&\u0003&\u0249\b&\u0001\'\u0004\'\u024c\b\'\u000b"+ - "\'\f\'\u024d\u0001(\u0004(\u0251\b(\u000b(\f(\u0252\u0001(\u0001(\u0005"+ - "(\u0257\b(\n(\f(\u025a\t(\u0001(\u0001(\u0004(\u025e\b(\u000b(\f(\u025f"+ - "\u0001(\u0004(\u0263\b(\u000b(\f(\u0264\u0001(\u0001(\u0005(\u0269\b("+ - "\n(\f(\u026c\t(\u0003(\u026e\b(\u0001(\u0001(\u0001(\u0001(\u0004(\u0274"+ - "\b(\u000b(\f(\u0275\u0001(\u0001(\u0003(\u027a\b(\u0001)\u0001)\u0001"+ - ")\u0001*\u0001*\u0001*\u0001*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001"+ - 
",\u0001-\u0001-\u0001.\u0001.\u0001.\u0001.\u0001.\u0001/\u0001/\u0001"+ - "0\u00010\u00010\u00010\u00010\u00010\u00011\u00011\u00011\u00011\u0001"+ - "1\u00011\u00012\u00012\u00012\u00012\u00012\u00013\u00013\u00014\u0001"+ - "4\u00014\u00015\u00015\u00015\u00016\u00016\u00016\u00016\u00016\u0001"+ - "7\u00017\u00017\u00017\u00018\u00018\u00018\u00018\u00018\u00019\u0001"+ - "9\u00019\u00019\u00019\u00019\u0001:\u0001:\u0001:\u0001;\u0001;\u0001"+ - "<\u0001<\u0001<\u0001<\u0001<\u0001<\u0001=\u0001=\u0001>\u0001>\u0001"+ - ">\u0001>\u0001>\u0001?\u0001?\u0001?\u0001@\u0001@\u0001@\u0001A\u0001"+ - "A\u0001B\u0001B\u0001B\u0001C\u0001C\u0001D\u0001D\u0001D\u0001E\u0001"+ - "E\u0001F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001I\u0001I\u0001J\u0001"+ - "J\u0001J\u0001J\u0001J\u0001K\u0001K\u0001K\u0001K\u0001K\u0001L\u0001"+ - "L\u0005L\u02f7\bL\nL\fL\u02fa\tL\u0001L\u0001L\u0003L\u02fe\bL\u0001L"+ - "\u0004L\u0301\bL\u000bL\fL\u0302\u0003L\u0305\bL\u0001M\u0001M\u0004M"+ - "\u0309\bM\u000bM\fM\u030a\u0001M\u0001M\u0001N\u0001N\u0001N\u0001N\u0001"+ - "O\u0001O\u0001O\u0001O\u0001P\u0001P\u0001P\u0001P\u0001Q\u0001Q\u0001"+ - "Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001R\u0001R\u0001R\u0001S\u0001"+ - "S\u0001S\u0001S\u0001S\u0001S\u0001T\u0001T\u0001T\u0001T\u0001U\u0001"+ - "U\u0001U\u0001U\u0001V\u0001V\u0001V\u0001V\u0001V\u0001V\u0001V\u0001"+ - "V\u0001V\u0001W\u0001W\u0001W\u0003W\u0340\bW\u0001X\u0004X\u0343\bX\u000b"+ - "X\fX\u0344\u0001Y\u0001Y\u0001Y\u0001Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001"+ - "[\u0001[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0001\\\u0001]\u0001]\u0001"+ - "]\u0001]\u0001]\u0001^\u0001^\u0001^\u0001^\u0001_\u0001_\u0001_\u0001"+ - "_\u0001`\u0001`\u0001`\u0001`\u0003`\u0368\b`\u0001a\u0001a\u0003a\u036c"+ - "\ba\u0001a\u0005a\u036f\ba\na\fa\u0372\ta\u0001a\u0001a\u0003a\u0376\b"+ - "a\u0001a\u0004a\u0379\ba\u000ba\fa\u037a\u0003a\u037d\ba\u0001b\u0001"+ - "b\u0001b\u0001b\u0001c\u0001c\u0001c\u0001c\u0001d\u0001d\u0001d\u0001"+ - "d\u0001e\u0001e\u0001e\u0001e\u0001f\u0001f\u0001f\u0001f\u0001f\u0001"+ - "g\u0001g\u0001g\u0001g\u0001h\u0001h\u0001h\u0001h\u0001i\u0001i\u0001"+ - "i\u0001i\u0001j\u0001j\u0001j\u0001k\u0001k\u0001k\u0001k\u0001l\u0001"+ - "l\u0001l\u0001l\u0001m\u0001m\u0001m\u0001m\u0001n\u0001n\u0001n\u0001"+ - "n\u0001o\u0001o\u0001o\u0001o\u0001p\u0001p\u0001p\u0001p\u0001p\u0001"+ - "q\u0001q\u0001q\u0001q\u0001q\u0001r\u0001r\u0001r\u0001r\u0001r\u0001"+ - "r\u0001r\u0001s\u0001s\u0001s\u0001s\u0001t\u0001t\u0001t\u0001t\u0001"+ - "u\u0001u\u0001u\u0001u\u0001v\u0001v\u0001v\u0001v\u0001w\u0001w\u0001"+ - "w\u0001w\u0001x\u0001x\u0001x\u0001x\u0001x\u0001x\u0001y\u0001y\u0001"+ - "y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001"+ + "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b"+ + "\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001"+ + "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ + "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\u000b"+ + "\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ + "\u0001\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\f\u0001"+ + "\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001"+ + "\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e"+ + "\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f"+ + "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001\u0010"+ + 
"\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0011"+ + "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011"+ + "\u0001\u0011\u0001\u0012\u0004\u0012\u01d8\b\u0012\u000b\u0012\f\u0012"+ + "\u01d9\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0013\u0001"+ + "\u0013\u0005\u0013\u01e2\b\u0013\n\u0013\f\u0013\u01e5\t\u0013\u0001\u0013"+ + "\u0003\u0013\u01e8\b\u0013\u0001\u0013\u0003\u0013\u01eb\b\u0013\u0001"+ + "\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001"+ + "\u0014\u0005\u0014\u01f4\b\u0014\n\u0014\f\u0014\u01f7\t\u0014\u0001\u0014"+ + "\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0015\u0004\u0015"+ + "\u01ff\b\u0015\u000b\u0015\f\u0015\u0200\u0001\u0015\u0001\u0015\u0001"+ + "\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0017\u0001"+ + "\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018\u0001"+ + "\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001"+ + "\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b\u0001"+ + "\u001b\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001d\u0001\u001d\u0001"+ + "\u001e\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0001 \u0001 \u0003"+ + " \u022a\b \u0001 \u0004 \u022d\b \u000b \f \u022e\u0001!\u0001!\u0001"+ + "\"\u0001\"\u0001#\u0001#\u0001#\u0003#\u0238\b#\u0001$\u0001$\u0001%\u0001"+ + "%\u0001%\u0003%\u023f\b%\u0001&\u0001&\u0001&\u0005&\u0244\b&\n&\f&\u0247"+ + "\t&\u0001&\u0001&\u0001&\u0001&\u0001&\u0001&\u0005&\u024f\b&\n&\f&\u0252"+ + "\t&\u0001&\u0001&\u0001&\u0001&\u0001&\u0003&\u0259\b&\u0001&\u0003&\u025c"+ + "\b&\u0003&\u025e\b&\u0001\'\u0004\'\u0261\b\'\u000b\'\f\'\u0262\u0001"+ + "(\u0004(\u0266\b(\u000b(\f(\u0267\u0001(\u0001(\u0005(\u026c\b(\n(\f("+ + "\u026f\t(\u0001(\u0001(\u0004(\u0273\b(\u000b(\f(\u0274\u0001(\u0004("+ + "\u0278\b(\u000b(\f(\u0279\u0001(\u0001(\u0005(\u027e\b(\n(\f(\u0281\t"+ + "(\u0003(\u0283\b(\u0001(\u0001(\u0001(\u0001(\u0004(\u0289\b(\u000b(\f"+ + "(\u028a\u0001(\u0001(\u0003(\u028f\b(\u0001)\u0001)\u0001)\u0001*\u0001"+ + "*\u0001*\u0001*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001-\u0001"+ + "-\u0001.\u0001.\u0001.\u0001.\u0001.\u0001/\u0001/\u00010\u00010\u0001"+ + "0\u00010\u00010\u00010\u00011\u00011\u00011\u00011\u00011\u00011\u0001"+ + "2\u00012\u00012\u00012\u00012\u00013\u00013\u00014\u00014\u00014\u0001"+ + "5\u00015\u00015\u00016\u00016\u00016\u00016\u00016\u00017\u00017\u0001"+ + "7\u00017\u00018\u00018\u00018\u00018\u00018\u00019\u00019\u00019\u0001"+ + "9\u00019\u00019\u0001:\u0001:\u0001:\u0001;\u0001;\u0001<\u0001<\u0001"+ + "<\u0001<\u0001<\u0001<\u0001=\u0001=\u0001>\u0001>\u0001>\u0001>\u0001"+ + ">\u0001?\u0001?\u0001?\u0001@\u0001@\u0001@\u0001A\u0001A\u0001B\u0001"+ + "B\u0001B\u0001C\u0001C\u0001D\u0001D\u0001D\u0001E\u0001E\u0001F\u0001"+ + "F\u0001G\u0001G\u0001H\u0001H\u0001I\u0001I\u0001J\u0001J\u0001J\u0001"+ + "J\u0001J\u0001K\u0001K\u0001K\u0001K\u0001K\u0001L\u0001L\u0005L\u030c"+ + "\bL\nL\fL\u030f\tL\u0001L\u0001L\u0003L\u0313\bL\u0001L\u0004L\u0316\b"+ + "L\u000bL\fL\u0317\u0003L\u031a\bL\u0001M\u0001M\u0004M\u031e\bM\u000b"+ + "M\fM\u031f\u0001M\u0001M\u0001N\u0001N\u0001N\u0001N\u0001O\u0001O\u0001"+ + "O\u0001O\u0001P\u0001P\u0001P\u0001P\u0001Q\u0001Q\u0001Q\u0001Q\u0001"+ + "Q\u0001R\u0001R\u0001R\u0001R\u0001S\u0001S\u0001S\u0001S\u0001T\u0001"+ + "T\u0001T\u0001T\u0001U\u0001U\u0001U\u0001U\u0001V\u0001V\u0001V\u0001"+ + "V\u0001V\u0001V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001W\u0003W\u0351"+ + 
"\bW\u0001X\u0004X\u0354\bX\u000bX\fX\u0355\u0001Y\u0001Y\u0001Y\u0001"+ + "Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001\\\u0001"+ + "\\\u0001\\\u0001\\\u0001]\u0001]\u0001]\u0001]\u0001]\u0001^\u0001^\u0001"+ + "^\u0001^\u0001_\u0001_\u0001_\u0001_\u0001`\u0001`\u0001`\u0001`\u0003"+ + "`\u0379\b`\u0001a\u0001a\u0003a\u037d\ba\u0001a\u0005a\u0380\ba\na\fa"+ + "\u0383\ta\u0001a\u0001a\u0003a\u0387\ba\u0001a\u0004a\u038a\ba\u000ba"+ + "\fa\u038b\u0003a\u038e\ba\u0001b\u0001b\u0001b\u0001b\u0001c\u0001c\u0001"+ + "c\u0001c\u0001d\u0001d\u0001d\u0001d\u0001e\u0001e\u0001e\u0001e\u0001"+ + "f\u0001f\u0001f\u0001f\u0001g\u0001g\u0001g\u0001g\u0001g\u0001h\u0001"+ + "h\u0001h\u0001h\u0001i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001"+ + "j\u0001k\u0001k\u0001k\u0001l\u0001l\u0001l\u0001l\u0001m\u0001m\u0001"+ + "m\u0001m\u0001n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001"+ + "p\u0001p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001q\u0001r\u0001"+ + "r\u0001r\u0001r\u0001r\u0001s\u0001s\u0001s\u0001s\u0001s\u0001t\u0001"+ + "t\u0001t\u0001t\u0001t\u0001t\u0001t\u0001u\u0001u\u0001v\u0001v\u0003"+ + "v\u03e6\bv\u0001v\u0005v\u03e9\bv\nv\fv\u03ec\tv\u0001w\u0001w\u0001w"+ + "\u0001w\u0001x\u0001x\u0001x\u0001x\u0001y\u0001y\u0001y\u0001y\u0001"+ + "z\u0001z\u0001z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001"+ "|\u0001|\u0001|\u0001|\u0001}\u0001}\u0001}\u0001}\u0001~\u0001~\u0001"+ "~\u0001~\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u0080\u0001"+ "\u0080\u0001\u0080\u0001\u0080\u0001\u0081\u0001\u0081\u0001\u0081\u0001"+ - "\u0081\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0082\u0001"+ - "\u0083\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001"+ - "\u0084\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001"+ + "\u0081\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0083\u0001"+ + "\u0083\u0001\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001\u0084\u0001"+ + "\u0084\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0086\u0001"+ "\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0087\u0001\u0087\u0001"+ "\u0087\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0088\u0001"+ - "\u0089\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u008a\u0001"+ - "\u008a\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b\u0001"+ - "\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001"+ - "\u008b\u0001\u008b\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001"+ - "\u008d\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001"+ - "\u008e\u0001\u008e\u0002\u01e0\u023b\u0000\u008f\n\u0001\f\u0002\u000e"+ - "\u0003\u0010\u0004\u0012\u0005\u0014\u0006\u0016\u0007\u0018\b\u001a\t"+ - "\u001c\n\u001e\u000b \f\"\r$\u000e&\u000f(\u0010*\u0011,\u0012.\u0013"+ - "0\u00142\u00154\u00166\u00008\u0000:\u0017<\u0018>\u0019@\u001aB\u0000"+ - "D\u0000F\u0000H\u0000J\u0000L\u0000N\u0000P\u0000R\u0000T\u0000V\u001b"+ - "X\u001cZ\u001d\\\u001e^\u001f` b!d\"f#h$j%l&n\'p(r)t*v+x,z-|.~/\u0080"+ - "0\u00821\u00842\u00863\u00884\u008a5\u008c6\u008e7\u00908\u00929\u0094"+ - ":\u0096;\u0098<\u009a=\u009c>\u009e?\u00a0@\u00a2A\u00a4B\u00a6C\u00a8"+ - "D\u00aaE\u00ac\u0000\u00ae\u0000\u00b0\u0000\u00b2\u0000\u00b4\u0000\u00b6"+ - "F\u00b8\u0000\u00baG\u00bc\u0000\u00beH\u00c0I\u00c2J\u00c4\u0000\u00c6"+ - "\u0000\u00c8\u0000\u00ca\u0000\u00ccK\u00ce\u0000\u00d0L\u00d2M\u00d4"+ - "N\u00d6\u0000\u00d8\u0000\u00da\u0000\u00dc\u0000\u00deO\u00e0\u0000\u00e2"+ - 
"\u0000\u00e4P\u00e6Q\u00e8R\u00ea\u0000\u00ecS\u00eeT\u00f0\u0000\u00f2"+ - "\u0000\u00f4U\u00f6V\u00f8W\u00fa\u0000\u00fc\u0000\u00fe\u0000\u0100"+ - "\u0000\u0102\u0000\u0104\u0000\u0106\u0000\u0108X\u010aY\u010cZ\u010e"+ - "\u0000\u0110\u0000\u0112\u0000\u0114\u0000\u0116[\u0118\\\u011a]\u011c"+ - "\u0000\u011e^\u0120_\u0122`\u0124a\u0126b\n\u0000\u0001\u0002\u0003\u0004"+ - "\u0005\u0006\u0007\b\t\f\u0006\u0000\t\n\r\r //[[]]\u0002\u0000\n\n\r"+ - "\r\u0003\u0000\t\n\r\r \u0001\u000009\u0002\u0000AZaz\u0005\u0000\"\""+ - "\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\\\\u0002\u0000EEee\u0002\u0000++-"+ - "-\u0001\u0000``\n\u0000\t\n\r\r ,,//==[[]]``||\u0002\u0000**//\u0458"+ - "\u0000\n\u0001\u0000\u0000\u0000\u0000\f\u0001\u0000\u0000\u0000\u0000"+ - "\u000e\u0001\u0000\u0000\u0000\u0000\u0010\u0001\u0000\u0000\u0000\u0000"+ - "\u0012\u0001\u0000\u0000\u0000\u0000\u0014\u0001\u0000\u0000\u0000\u0000"+ - "\u0016\u0001\u0000\u0000\u0000\u0000\u0018\u0001\u0000\u0000\u0000\u0000"+ - "\u001a\u0001\u0000\u0000\u0000\u0000\u001c\u0001\u0000\u0000\u0000\u0000"+ - "\u001e\u0001\u0000\u0000\u0000\u0000 \u0001\u0000\u0000\u0000\u0000\""+ - "\u0001\u0000\u0000\u0000\u0000$\u0001\u0000\u0000\u0000\u0000&\u0001\u0000"+ - "\u0000\u0000\u0000(\u0001\u0000\u0000\u0000\u0000*\u0001\u0000\u0000\u0000"+ - "\u0000,\u0001\u0000\u0000\u0000\u0000.\u0001\u0000\u0000\u0000\u00000"+ - "\u0001\u0000\u0000\u0000\u00002\u0001\u0000\u0000\u0000\u00004\u0001\u0000"+ - "\u0000\u0000\u00016\u0001\u0000\u0000\u0000\u00018\u0001\u0000\u0000\u0000"+ - "\u0001:\u0001\u0000\u0000\u0000\u0001<\u0001\u0000\u0000\u0000\u0001>"+ - "\u0001\u0000\u0000\u0000\u0002@\u0001\u0000\u0000\u0000\u0002V\u0001\u0000"+ - "\u0000\u0000\u0002X\u0001\u0000\u0000\u0000\u0002Z\u0001\u0000\u0000\u0000"+ - "\u0002\\\u0001\u0000\u0000\u0000\u0002^\u0001\u0000\u0000\u0000\u0002"+ - "`\u0001\u0000\u0000\u0000\u0002b\u0001\u0000\u0000\u0000\u0002d\u0001"+ - "\u0000\u0000\u0000\u0002f\u0001\u0000\u0000\u0000\u0002h\u0001\u0000\u0000"+ - "\u0000\u0002j\u0001\u0000\u0000\u0000\u0002l\u0001\u0000\u0000\u0000\u0002"+ - "n\u0001\u0000\u0000\u0000\u0002p\u0001\u0000\u0000\u0000\u0002r\u0001"+ - "\u0000\u0000\u0000\u0002t\u0001\u0000\u0000\u0000\u0002v\u0001\u0000\u0000"+ - "\u0000\u0002x\u0001\u0000\u0000\u0000\u0002z\u0001\u0000\u0000\u0000\u0002"+ - "|\u0001\u0000\u0000\u0000\u0002~\u0001\u0000\u0000\u0000\u0002\u0080\u0001"+ - "\u0000\u0000\u0000\u0002\u0082\u0001\u0000\u0000\u0000\u0002\u0084\u0001"+ - "\u0000\u0000\u0000\u0002\u0086\u0001\u0000\u0000\u0000\u0002\u0088\u0001"+ - "\u0000\u0000\u0000\u0002\u008a\u0001\u0000\u0000\u0000\u0002\u008c\u0001"+ - "\u0000\u0000\u0000\u0002\u008e\u0001\u0000\u0000\u0000\u0002\u0090\u0001"+ - "\u0000\u0000\u0000\u0002\u0092\u0001\u0000\u0000\u0000\u0002\u0094\u0001"+ - "\u0000\u0000\u0000\u0002\u0096\u0001\u0000\u0000\u0000\u0002\u0098\u0001"+ - "\u0000\u0000\u0000\u0002\u009a\u0001\u0000\u0000\u0000\u0002\u009c\u0001"+ - "\u0000\u0000\u0000\u0002\u009e\u0001\u0000\u0000\u0000\u0002\u00a0\u0001"+ - "\u0000\u0000\u0000\u0002\u00a2\u0001\u0000\u0000\u0000\u0002\u00a4\u0001"+ - "\u0000\u0000\u0000\u0002\u00a6\u0001\u0000\u0000\u0000\u0002\u00a8\u0001"+ - "\u0000\u0000\u0000\u0002\u00aa\u0001\u0000\u0000\u0000\u0003\u00ac\u0001"+ - "\u0000\u0000\u0000\u0003\u00ae\u0001\u0000\u0000\u0000\u0003\u00b0\u0001"+ - "\u0000\u0000\u0000\u0003\u00b2\u0001\u0000\u0000\u0000\u0003\u00b4\u0001"+ - "\u0000\u0000\u0000\u0003\u00b6\u0001\u0000\u0000\u0000\u0003\u00ba\u0001"+ - 
"\u0000\u0000\u0000\u0003\u00bc\u0001\u0000\u0000\u0000\u0003\u00be\u0001"+ - "\u0000\u0000\u0000\u0003\u00c0\u0001\u0000\u0000\u0000\u0003\u00c2\u0001"+ - "\u0000\u0000\u0000\u0004\u00c4\u0001\u0000\u0000\u0000\u0004\u00c6\u0001"+ - "\u0000\u0000\u0000\u0004\u00c8\u0001\u0000\u0000\u0000\u0004\u00cc\u0001"+ - "\u0000\u0000\u0000\u0004\u00ce\u0001\u0000\u0000\u0000\u0004\u00d0\u0001"+ - "\u0000\u0000\u0000\u0004\u00d2\u0001\u0000\u0000\u0000\u0004\u00d4\u0001"+ - "\u0000\u0000\u0000\u0005\u00d6\u0001\u0000\u0000\u0000\u0005\u00d8\u0001"+ - "\u0000\u0000\u0000\u0005\u00da\u0001\u0000\u0000\u0000\u0005\u00dc\u0001"+ - "\u0000\u0000\u0000\u0005\u00de\u0001\u0000\u0000\u0000\u0005\u00e0\u0001"+ - "\u0000\u0000\u0000\u0005\u00e2\u0001\u0000\u0000\u0000\u0005\u00e4\u0001"+ - "\u0000\u0000\u0000\u0005\u00e6\u0001\u0000\u0000\u0000\u0005\u00e8\u0001"+ - "\u0000\u0000\u0000\u0006\u00ea\u0001\u0000\u0000\u0000\u0006\u00ec\u0001"+ - "\u0000\u0000\u0000\u0006\u00ee\u0001\u0000\u0000\u0000\u0006\u00f0\u0001"+ - "\u0000\u0000\u0000\u0006\u00f2\u0001\u0000\u0000\u0000\u0006\u00f4\u0001"+ - "\u0000\u0000\u0000\u0006\u00f6\u0001\u0000\u0000\u0000\u0006\u00f8\u0001"+ - "\u0000\u0000\u0000\u0007\u00fa\u0001\u0000\u0000\u0000\u0007\u00fc\u0001"+ - "\u0000\u0000\u0000\u0007\u00fe\u0001\u0000\u0000\u0000\u0007\u0100\u0001"+ - "\u0000\u0000\u0000\u0007\u0102\u0001\u0000\u0000\u0000\u0007\u0104\u0001"+ - "\u0000\u0000\u0000\u0007\u0106\u0001\u0000\u0000\u0000\u0007\u0108\u0001"+ - "\u0000\u0000\u0000\u0007\u010a\u0001\u0000\u0000\u0000\u0007\u010c\u0001"+ - "\u0000\u0000\u0000\b\u010e\u0001\u0000\u0000\u0000\b\u0110\u0001\u0000"+ - "\u0000\u0000\b\u0112\u0001\u0000\u0000\u0000\b\u0114\u0001\u0000\u0000"+ - "\u0000\b\u0116\u0001\u0000\u0000\u0000\b\u0118\u0001\u0000\u0000\u0000"+ - "\b\u011a\u0001\u0000\u0000\u0000\t\u011c\u0001\u0000\u0000\u0000\t\u011e"+ - "\u0001\u0000\u0000\u0000\t\u0120\u0001\u0000\u0000\u0000\t\u0122\u0001"+ - "\u0000\u0000\u0000\t\u0124\u0001\u0000\u0000\u0000\t\u0126\u0001\u0000"+ - "\u0000\u0000\n\u0128\u0001\u0000\u0000\u0000\f\u0132\u0001\u0000\u0000"+ - "\u0000\u000e\u0139\u0001\u0000\u0000\u0000\u0010\u0142\u0001\u0000\u0000"+ - "\u0000\u0012\u0149\u0001\u0000\u0000\u0000\u0014\u0153\u0001\u0000\u0000"+ - "\u0000\u0016\u015a\u0001\u0000\u0000\u0000\u0018\u0161\u0001\u0000\u0000"+ - "\u0000\u001a\u016f\u0001\u0000\u0000\u0000\u001c\u0176\u0001\u0000\u0000"+ - "\u0000\u001e\u017e\u0001\u0000\u0000\u0000 \u018a\u0001\u0000\u0000\u0000"+ - "\"\u0194\u0001\u0000\u0000\u0000$\u019d\u0001\u0000\u0000\u0000&\u01a3"+ - "\u0001\u0000\u0000\u0000(\u01aa\u0001\u0000\u0000\u0000*\u01b1\u0001\u0000"+ - "\u0000\u0000,\u01b9\u0001\u0000\u0000\u0000.\u01c2\u0001\u0000\u0000\u0000"+ - "0\u01c8\u0001\u0000\u0000\u00002\u01d9\u0001\u0000\u0000\u00004\u01e9"+ - "\u0001\u0000\u0000\u00006\u01ef\u0001\u0000\u0000\u00008\u01f4\u0001\u0000"+ - "\u0000\u0000:\u01f9\u0001\u0000\u0000\u0000<\u01fd\u0001\u0000\u0000\u0000"+ - ">\u0201\u0001\u0000\u0000\u0000@\u0205\u0001\u0000\u0000\u0000B\u0209"+ - "\u0001\u0000\u0000\u0000D\u020b\u0001\u0000\u0000\u0000F\u020d\u0001\u0000"+ - "\u0000\u0000H\u0210\u0001\u0000\u0000\u0000J\u0212\u0001\u0000\u0000\u0000"+ - "L\u021b\u0001\u0000\u0000\u0000N\u021d\u0001\u0000\u0000\u0000P\u0222"+ - "\u0001\u0000\u0000\u0000R\u0224\u0001\u0000\u0000\u0000T\u0229\u0001\u0000"+ - "\u0000\u0000V\u0248\u0001\u0000\u0000\u0000X\u024b\u0001\u0000\u0000\u0000"+ - "Z\u0279\u0001\u0000\u0000\u0000\\\u027b\u0001\u0000\u0000\u0000^\u027e"+ - 
"\u0001\u0000\u0000\u0000`\u0282\u0001\u0000\u0000\u0000b\u0286\u0001\u0000"+ - "\u0000\u0000d\u0288\u0001\u0000\u0000\u0000f\u028a\u0001\u0000\u0000\u0000"+ - "h\u028f\u0001\u0000\u0000\u0000j\u0291\u0001\u0000\u0000\u0000l\u0297"+ - "\u0001\u0000\u0000\u0000n\u029d\u0001\u0000\u0000\u0000p\u02a2\u0001\u0000"+ - "\u0000\u0000r\u02a4\u0001\u0000\u0000\u0000t\u02a7\u0001\u0000\u0000\u0000"+ - "v\u02aa\u0001\u0000\u0000\u0000x\u02af\u0001\u0000\u0000\u0000z\u02b3"+ - "\u0001\u0000\u0000\u0000|\u02b8\u0001\u0000\u0000\u0000~\u02be\u0001\u0000"+ - "\u0000\u0000\u0080\u02c1\u0001\u0000\u0000\u0000\u0082\u02c3\u0001\u0000"+ - "\u0000\u0000\u0084\u02c9\u0001\u0000\u0000\u0000\u0086\u02cb\u0001\u0000"+ - "\u0000\u0000\u0088\u02d0\u0001\u0000\u0000\u0000\u008a\u02d3\u0001\u0000"+ - "\u0000\u0000\u008c\u02d6\u0001\u0000\u0000\u0000\u008e\u02d8\u0001\u0000"+ - "\u0000\u0000\u0090\u02db\u0001\u0000\u0000\u0000\u0092\u02dd\u0001\u0000"+ - "\u0000\u0000\u0094\u02e0\u0001\u0000\u0000\u0000\u0096\u02e2\u0001\u0000"+ - "\u0000\u0000\u0098\u02e4\u0001\u0000\u0000\u0000\u009a\u02e6\u0001\u0000"+ - "\u0000\u0000\u009c\u02e8\u0001\u0000\u0000\u0000\u009e\u02ea\u0001\u0000"+ - "\u0000\u0000\u00a0\u02ef\u0001\u0000\u0000\u0000\u00a2\u0304\u0001\u0000"+ - "\u0000\u0000\u00a4\u0306\u0001\u0000\u0000\u0000\u00a6\u030e\u0001\u0000"+ - "\u0000\u0000\u00a8\u0312\u0001\u0000\u0000\u0000\u00aa\u0316\u0001\u0000"+ - "\u0000\u0000\u00ac\u031a\u0001\u0000\u0000\u0000\u00ae\u031f\u0001\u0000"+ - "\u0000\u0000\u00b0\u0325\u0001\u0000\u0000\u0000\u00b2\u032b\u0001\u0000"+ - "\u0000\u0000\u00b4\u032f\u0001\u0000\u0000\u0000\u00b6\u0333\u0001\u0000"+ - "\u0000\u0000\u00b8\u033f\u0001\u0000\u0000\u0000\u00ba\u0342\u0001\u0000"+ - "\u0000\u0000\u00bc\u0346\u0001\u0000\u0000\u0000\u00be\u034a\u0001\u0000"+ - "\u0000\u0000\u00c0\u034e\u0001\u0000\u0000\u0000\u00c2\u0352\u0001\u0000"+ - "\u0000\u0000\u00c4\u0356\u0001\u0000\u0000\u0000\u00c6\u035b\u0001\u0000"+ - "\u0000\u0000\u00c8\u035f\u0001\u0000\u0000\u0000\u00ca\u0367\u0001\u0000"+ - "\u0000\u0000\u00cc\u037c\u0001\u0000\u0000\u0000\u00ce\u037e\u0001\u0000"+ - "\u0000\u0000\u00d0\u0382\u0001\u0000\u0000\u0000\u00d2\u0386\u0001\u0000"+ - "\u0000\u0000\u00d4\u038a\u0001\u0000\u0000\u0000\u00d6\u038e\u0001\u0000"+ - "\u0000\u0000\u00d8\u0393\u0001\u0000\u0000\u0000\u00da\u0397\u0001\u0000"+ - "\u0000\u0000\u00dc\u039b\u0001\u0000\u0000\u0000\u00de\u039f\u0001\u0000"+ - "\u0000\u0000\u00e0\u03a2\u0001\u0000\u0000\u0000\u00e2\u03a6\u0001\u0000"+ - "\u0000\u0000\u00e4\u03aa\u0001\u0000\u0000\u0000\u00e6\u03ae\u0001\u0000"+ - "\u0000\u0000\u00e8\u03b2\u0001\u0000\u0000\u0000\u00ea\u03b6\u0001\u0000"+ - "\u0000\u0000\u00ec\u03bb\u0001\u0000\u0000\u0000\u00ee\u03c0\u0001\u0000"+ - "\u0000\u0000\u00f0\u03c7\u0001\u0000\u0000\u0000\u00f2\u03cb\u0001\u0000"+ - "\u0000\u0000\u00f4\u03cf\u0001\u0000\u0000\u0000\u00f6\u03d3\u0001\u0000"+ - "\u0000\u0000\u00f8\u03d7\u0001\u0000\u0000\u0000\u00fa\u03db\u0001\u0000"+ - "\u0000\u0000\u00fc\u03e1\u0001\u0000\u0000\u0000\u00fe\u03e5\u0001\u0000"+ - "\u0000\u0000\u0100\u03e9\u0001\u0000\u0000\u0000\u0102\u03ed\u0001\u0000"+ - "\u0000\u0000\u0104\u03f1\u0001\u0000\u0000\u0000\u0106\u03f5\u0001\u0000"+ - "\u0000\u0000\u0108\u03f9\u0001\u0000\u0000\u0000\u010a\u03fd\u0001\u0000"+ - "\u0000\u0000\u010c\u0401\u0001\u0000\u0000\u0000\u010e\u0405\u0001\u0000"+ - "\u0000\u0000\u0110\u040a\u0001\u0000\u0000\u0000\u0112\u040e\u0001\u0000"+ - "\u0000\u0000\u0114\u0412\u0001\u0000\u0000\u0000\u0116\u0416\u0001\u0000"+ - 
"\u0000\u0000\u0118\u041a\u0001\u0000\u0000\u0000\u011a\u041e\u0001\u0000"+ - "\u0000\u0000\u011c\u0422\u0001\u0000\u0000\u0000\u011e\u0427\u0001\u0000"+ - "\u0000\u0000\u0120\u042c\u0001\u0000\u0000\u0000\u0122\u0436\u0001\u0000"+ - "\u0000\u0000\u0124\u043a\u0001\u0000\u0000\u0000\u0126\u043e\u0001\u0000"+ - "\u0000\u0000\u0128\u0129\u0005d\u0000\u0000\u0129\u012a\u0005i\u0000\u0000"+ - "\u012a\u012b\u0005s\u0000\u0000\u012b\u012c\u0005s\u0000\u0000\u012c\u012d"+ - "\u0005e\u0000\u0000\u012d\u012e\u0005c\u0000\u0000\u012e\u012f\u0005t"+ - "\u0000\u0000\u012f\u0130\u0001\u0000\u0000\u0000\u0130\u0131\u0006\u0000"+ - "\u0000\u0000\u0131\u000b\u0001\u0000\u0000\u0000\u0132\u0133\u0005d\u0000"+ - "\u0000\u0133\u0134\u0005r\u0000\u0000\u0134\u0135\u0005o\u0000\u0000\u0135"+ - "\u0136\u0005p\u0000\u0000\u0136\u0137\u0001\u0000\u0000\u0000\u0137\u0138"+ - "\u0006\u0001\u0001\u0000\u0138\r\u0001\u0000\u0000\u0000\u0139\u013a\u0005"+ - "e\u0000\u0000\u013a\u013b\u0005n\u0000\u0000\u013b\u013c\u0005r\u0000"+ - "\u0000\u013c\u013d\u0005i\u0000\u0000\u013d\u013e\u0005c\u0000\u0000\u013e"+ - "\u013f\u0005h\u0000\u0000\u013f\u0140\u0001\u0000\u0000\u0000\u0140\u0141"+ - "\u0006\u0002\u0002\u0000\u0141\u000f\u0001\u0000\u0000\u0000\u0142\u0143"+ - "\u0005e\u0000\u0000\u0143\u0144\u0005v\u0000\u0000\u0144\u0145\u0005a"+ - "\u0000\u0000\u0145\u0146\u0005l\u0000\u0000\u0146\u0147\u0001\u0000\u0000"+ - "\u0000\u0147\u0148\u0006\u0003\u0000\u0000\u0148\u0011\u0001\u0000\u0000"+ - "\u0000\u0149\u014a\u0005e\u0000\u0000\u014a\u014b\u0005x\u0000\u0000\u014b"+ - "\u014c\u0005p\u0000\u0000\u014c\u014d\u0005l\u0000\u0000\u014d\u014e\u0005"+ - "a\u0000\u0000\u014e\u014f\u0005i\u0000\u0000\u014f\u0150\u0005n\u0000"+ - "\u0000\u0150\u0151\u0001\u0000\u0000\u0000\u0151\u0152\u0006\u0004\u0003"+ - "\u0000\u0152\u0013\u0001\u0000\u0000\u0000\u0153\u0154\u0005f\u0000\u0000"+ - "\u0154\u0155\u0005r\u0000\u0000\u0155\u0156\u0005o\u0000\u0000\u0156\u0157"+ - "\u0005m\u0000\u0000\u0157\u0158\u0001\u0000\u0000\u0000\u0158\u0159\u0006"+ - "\u0005\u0004\u0000\u0159\u0015\u0001\u0000\u0000\u0000\u015a\u015b\u0005"+ - "g\u0000\u0000\u015b\u015c\u0005r\u0000\u0000\u015c\u015d\u0005o\u0000"+ - "\u0000\u015d\u015e\u0005k\u0000\u0000\u015e\u015f\u0001\u0000\u0000\u0000"+ - "\u015f\u0160\u0006\u0006\u0000\u0000\u0160\u0017\u0001\u0000\u0000\u0000"+ - "\u0161\u0162\u0005i\u0000\u0000\u0162\u0163\u0005n\u0000\u0000\u0163\u0164"+ - "\u0005l\u0000\u0000\u0164\u0165\u0005i\u0000\u0000\u0165\u0166\u0005n"+ - "\u0000\u0000\u0166\u0167\u0005e\u0000\u0000\u0167\u0168\u0005s\u0000\u0000"+ - "\u0168\u0169\u0005t\u0000\u0000\u0169\u016a\u0005a\u0000\u0000\u016a\u016b"+ - "\u0005t\u0000\u0000\u016b\u016c\u0005s\u0000\u0000\u016c\u016d\u0001\u0000"+ - "\u0000\u0000\u016d\u016e\u0006\u0007\u0000\u0000\u016e\u0019\u0001\u0000"+ - "\u0000\u0000\u016f\u0170\u0005k\u0000\u0000\u0170\u0171\u0005e\u0000\u0000"+ - "\u0171\u0172\u0005e\u0000\u0000\u0172\u0173\u0005p\u0000\u0000\u0173\u0174"+ - "\u0001\u0000\u0000\u0000\u0174\u0175\u0006\b\u0001\u0000\u0175\u001b\u0001"+ - "\u0000\u0000\u0000\u0176\u0177\u0005l\u0000\u0000\u0177\u0178\u0005i\u0000"+ - "\u0000\u0178\u0179\u0005m\u0000\u0000\u0179\u017a\u0005i\u0000\u0000\u017a"+ - "\u017b\u0005t\u0000\u0000\u017b\u017c\u0001\u0000\u0000\u0000\u017c\u017d"+ - "\u0006\t\u0000\u0000\u017d\u001d\u0001\u0000\u0000\u0000\u017e\u017f\u0005"+ - "m\u0000\u0000\u017f\u0180\u0005v\u0000\u0000\u0180\u0181\u0005_\u0000"+ - "\u0000\u0181\u0182\u0005e\u0000\u0000\u0182\u0183\u0005x\u0000\u0000\u0183"+ - 
"\u0184\u0005p\u0000\u0000\u0184\u0185\u0005a\u0000\u0000\u0185\u0186\u0005"+ - "n\u0000\u0000\u0186\u0187\u0005d\u0000\u0000\u0187\u0188\u0001\u0000\u0000"+ - "\u0000\u0188\u0189\u0006\n\u0005\u0000\u0189\u001f\u0001\u0000\u0000\u0000"+ - "\u018a\u018b\u0005p\u0000\u0000\u018b\u018c\u0005r\u0000\u0000\u018c\u018d"+ - "\u0005o\u0000\u0000\u018d\u018e\u0005j\u0000\u0000\u018e\u018f\u0005e"+ - "\u0000\u0000\u018f\u0190\u0005c\u0000\u0000\u0190\u0191\u0005t\u0000\u0000"+ - "\u0191\u0192\u0001\u0000\u0000\u0000\u0192\u0193\u0006\u000b\u0001\u0000"+ - "\u0193!\u0001\u0000\u0000\u0000\u0194\u0195\u0005r\u0000\u0000\u0195\u0196"+ - "\u0005e\u0000\u0000\u0196\u0197\u0005n\u0000\u0000\u0197\u0198\u0005a"+ - "\u0000\u0000\u0198\u0199\u0005m\u0000\u0000\u0199\u019a\u0005e\u0000\u0000"+ - "\u019a\u019b\u0001\u0000\u0000\u0000\u019b\u019c\u0006\f\u0006\u0000\u019c"+ - "#\u0001\u0000\u0000\u0000\u019d\u019e\u0005r\u0000\u0000\u019e\u019f\u0005"+ - "o\u0000\u0000\u019f\u01a0\u0005w\u0000\u0000\u01a0\u01a1\u0001\u0000\u0000"+ - "\u0000\u01a1\u01a2\u0006\r\u0000\u0000\u01a2%\u0001\u0000\u0000\u0000"+ - "\u01a3\u01a4\u0005s\u0000\u0000\u01a4\u01a5\u0005h\u0000\u0000\u01a5\u01a6"+ - "\u0005o\u0000\u0000\u01a6\u01a7\u0005w\u0000\u0000\u01a7\u01a8\u0001\u0000"+ - "\u0000\u0000\u01a8\u01a9\u0006\u000e\u0007\u0000\u01a9\'\u0001\u0000\u0000"+ - "\u0000\u01aa\u01ab\u0005s\u0000\u0000\u01ab\u01ac\u0005o\u0000\u0000\u01ac"+ - "\u01ad\u0005r\u0000\u0000\u01ad\u01ae\u0005t\u0000\u0000\u01ae\u01af\u0001"+ - "\u0000\u0000\u0000\u01af\u01b0\u0006\u000f\u0000\u0000\u01b0)\u0001\u0000"+ - "\u0000\u0000\u01b1\u01b2\u0005s\u0000\u0000\u01b2\u01b3\u0005t\u0000\u0000"+ - "\u01b3\u01b4\u0005a\u0000\u0000\u01b4\u01b5\u0005t\u0000\u0000\u01b5\u01b6"+ - "\u0005s\u0000\u0000\u01b6\u01b7\u0001\u0000\u0000\u0000\u01b7\u01b8\u0006"+ - "\u0010\u0000\u0000\u01b8+\u0001\u0000\u0000\u0000\u01b9\u01ba\u0005w\u0000"+ - "\u0000\u01ba\u01bb\u0005h\u0000\u0000\u01bb\u01bc\u0005e\u0000\u0000\u01bc"+ - "\u01bd\u0005r\u0000\u0000\u01bd\u01be\u0005e\u0000\u0000\u01be\u01bf\u0001"+ - "\u0000\u0000\u0000\u01bf\u01c0\u0006\u0011\u0000\u0000\u01c0-\u0001\u0000"+ - "\u0000\u0000\u01c1\u01c3\b\u0000\u0000\u0000\u01c2\u01c1\u0001\u0000\u0000"+ - "\u0000\u01c3\u01c4\u0001\u0000\u0000\u0000\u01c4\u01c2\u0001\u0000\u0000"+ - "\u0000\u01c4\u01c5\u0001\u0000\u0000\u0000\u01c5\u01c6\u0001\u0000\u0000"+ - "\u0000\u01c6\u01c7\u0006\u0012\u0000\u0000\u01c7/\u0001\u0000\u0000\u0000"+ - "\u01c8\u01c9\u0005/\u0000\u0000\u01c9\u01ca\u0005/\u0000\u0000\u01ca\u01ce"+ - "\u0001\u0000\u0000\u0000\u01cb\u01cd\b\u0001\u0000\u0000\u01cc\u01cb\u0001"+ - "\u0000\u0000\u0000\u01cd\u01d0\u0001\u0000\u0000\u0000\u01ce\u01cc\u0001"+ - "\u0000\u0000\u0000\u01ce\u01cf\u0001\u0000\u0000\u0000\u01cf\u01d2\u0001"+ - "\u0000\u0000\u0000\u01d0\u01ce\u0001\u0000\u0000\u0000\u01d1\u01d3\u0005"+ - "\r\u0000\u0000\u01d2\u01d1\u0001\u0000\u0000\u0000\u01d2\u01d3\u0001\u0000"+ - "\u0000\u0000\u01d3\u01d5\u0001\u0000\u0000\u0000\u01d4\u01d6\u0005\n\u0000"+ - "\u0000\u01d5\u01d4\u0001\u0000\u0000\u0000\u01d5\u01d6\u0001\u0000\u0000"+ - "\u0000\u01d6\u01d7\u0001\u0000\u0000\u0000\u01d7\u01d8\u0006\u0013\b\u0000"+ - "\u01d81\u0001\u0000\u0000\u0000\u01d9\u01da\u0005/\u0000\u0000\u01da\u01db"+ - "\u0005*\u0000\u0000\u01db\u01e0\u0001\u0000\u0000\u0000\u01dc\u01df\u0003"+ - "2\u0014\u0000\u01dd\u01df\t\u0000\u0000\u0000\u01de\u01dc\u0001\u0000"+ - "\u0000\u0000\u01de\u01dd\u0001\u0000\u0000\u0000\u01df\u01e2\u0001\u0000"+ - "\u0000\u0000\u01e0\u01e1\u0001\u0000\u0000\u0000\u01e0\u01de\u0001\u0000"+ 
- "\u0000\u0000\u01e1\u01e3\u0001\u0000\u0000\u0000\u01e2\u01e0\u0001\u0000"+ - "\u0000\u0000\u01e3\u01e4\u0005*\u0000\u0000\u01e4\u01e5\u0005/\u0000\u0000"+ - "\u01e5\u01e6\u0001\u0000\u0000\u0000\u01e6\u01e7\u0006\u0014\b\u0000\u01e7"+ - "3\u0001\u0000\u0000\u0000\u01e8\u01ea\u0007\u0002\u0000\u0000\u01e9\u01e8"+ - "\u0001\u0000\u0000\u0000\u01ea\u01eb\u0001\u0000\u0000\u0000\u01eb\u01e9"+ - "\u0001\u0000\u0000\u0000\u01eb\u01ec\u0001\u0000\u0000\u0000\u01ec\u01ed"+ - "\u0001\u0000\u0000\u0000\u01ed\u01ee\u0006\u0015\b\u0000\u01ee5\u0001"+ - "\u0000\u0000\u0000\u01ef\u01f0\u0003\u009eJ\u0000\u01f0\u01f1\u0001\u0000"+ - "\u0000\u0000\u01f1\u01f2\u0006\u0016\t\u0000\u01f2\u01f3\u0006\u0016\n"+ - "\u0000\u01f37\u0001\u0000\u0000\u0000\u01f4\u01f5\u0003@\u001b\u0000\u01f5"+ - "\u01f6\u0001\u0000\u0000\u0000\u01f6\u01f7\u0006\u0017\u000b\u0000\u01f7"+ - "\u01f8\u0006\u0017\f\u0000\u01f89\u0001\u0000\u0000\u0000\u01f9\u01fa"+ - "\u00034\u0015\u0000\u01fa\u01fb\u0001\u0000\u0000\u0000\u01fb\u01fc\u0006"+ - "\u0018\b\u0000\u01fc;\u0001\u0000\u0000\u0000\u01fd\u01fe\u00030\u0013"+ - "\u0000\u01fe\u01ff\u0001\u0000\u0000\u0000\u01ff\u0200\u0006\u0019\b\u0000"+ - "\u0200=\u0001\u0000\u0000\u0000\u0201\u0202\u00032\u0014\u0000\u0202\u0203"+ - "\u0001\u0000\u0000\u0000\u0203\u0204\u0006\u001a\b\u0000\u0204?\u0001"+ - "\u0000\u0000\u0000\u0205\u0206\u0005|\u0000\u0000\u0206\u0207\u0001\u0000"+ - "\u0000\u0000\u0207\u0208\u0006\u001b\f\u0000\u0208A\u0001\u0000\u0000"+ - "\u0000\u0209\u020a\u0007\u0003\u0000\u0000\u020aC\u0001\u0000\u0000\u0000"+ - "\u020b\u020c\u0007\u0004\u0000\u0000\u020cE\u0001\u0000\u0000\u0000\u020d"+ - "\u020e\u0005\\\u0000\u0000\u020e\u020f\u0007\u0005\u0000\u0000\u020fG"+ - "\u0001\u0000\u0000\u0000\u0210\u0211\b\u0006\u0000\u0000\u0211I\u0001"+ - "\u0000\u0000\u0000\u0212\u0214\u0007\u0007\u0000\u0000\u0213\u0215\u0007"+ - "\b\u0000\u0000\u0214\u0213\u0001\u0000\u0000\u0000\u0214\u0215\u0001\u0000"+ - "\u0000\u0000\u0215\u0217\u0001\u0000\u0000\u0000\u0216\u0218\u0003B\u001c"+ - "\u0000\u0217\u0216\u0001\u0000\u0000\u0000\u0218\u0219\u0001\u0000\u0000"+ - "\u0000\u0219\u0217\u0001\u0000\u0000\u0000\u0219\u021a\u0001\u0000\u0000"+ - "\u0000\u021aK\u0001\u0000\u0000\u0000\u021b\u021c\u0005@\u0000\u0000\u021c"+ - "M\u0001\u0000\u0000\u0000\u021d\u021e\u0005`\u0000\u0000\u021eO\u0001"+ - "\u0000\u0000\u0000\u021f\u0223\b\t\u0000\u0000\u0220\u0221\u0005`\u0000"+ - "\u0000\u0221\u0223\u0005`\u0000\u0000\u0222\u021f\u0001\u0000\u0000\u0000"+ - "\u0222\u0220\u0001\u0000\u0000\u0000\u0223Q\u0001\u0000\u0000\u0000\u0224"+ - "\u0225\u0005_\u0000\u0000\u0225S\u0001\u0000\u0000\u0000\u0226\u022a\u0003"+ - "D\u001d\u0000\u0227\u022a\u0003B\u001c\u0000\u0228\u022a\u0003R$\u0000"+ - "\u0229\u0226\u0001\u0000\u0000\u0000\u0229\u0227\u0001\u0000\u0000\u0000"+ - "\u0229\u0228\u0001\u0000\u0000\u0000\u022aU\u0001\u0000\u0000\u0000\u022b"+ - "\u0230\u0005\"\u0000\u0000\u022c\u022f\u0003F\u001e\u0000\u022d\u022f"+ - "\u0003H\u001f\u0000\u022e\u022c\u0001\u0000\u0000\u0000\u022e\u022d\u0001"+ - "\u0000\u0000\u0000\u022f\u0232\u0001\u0000\u0000\u0000\u0230\u022e\u0001"+ - "\u0000\u0000\u0000\u0230\u0231\u0001\u0000\u0000\u0000\u0231\u0233\u0001"+ - "\u0000\u0000\u0000\u0232\u0230\u0001\u0000\u0000\u0000\u0233\u0249\u0005"+ - "\"\u0000\u0000\u0234\u0235\u0005\"\u0000\u0000\u0235\u0236\u0005\"\u0000"+ - "\u0000\u0236\u0237\u0005\"\u0000\u0000\u0237\u023b\u0001\u0000\u0000\u0000"+ - "\u0238\u023a\b\u0001\u0000\u0000\u0239\u0238\u0001\u0000\u0000\u0000\u023a"+ - 
"\u023d\u0001\u0000\u0000\u0000\u023b\u023c\u0001\u0000\u0000\u0000\u023b"+ - "\u0239\u0001\u0000\u0000\u0000\u023c\u023e\u0001\u0000\u0000\u0000\u023d"+ - "\u023b\u0001\u0000\u0000\u0000\u023e\u023f\u0005\"\u0000\u0000\u023f\u0240"+ - "\u0005\"\u0000\u0000\u0240\u0241\u0005\"\u0000\u0000\u0241\u0243\u0001"+ - "\u0000\u0000\u0000\u0242\u0244\u0005\"\u0000\u0000\u0243\u0242\u0001\u0000"+ - "\u0000\u0000\u0243\u0244\u0001\u0000\u0000\u0000\u0244\u0246\u0001\u0000"+ - "\u0000\u0000\u0245\u0247\u0005\"\u0000\u0000\u0246\u0245\u0001\u0000\u0000"+ - "\u0000\u0246\u0247\u0001\u0000\u0000\u0000\u0247\u0249\u0001\u0000\u0000"+ - "\u0000\u0248\u022b\u0001\u0000\u0000\u0000\u0248\u0234\u0001\u0000\u0000"+ - "\u0000\u0249W\u0001\u0000\u0000\u0000\u024a\u024c\u0003B\u001c\u0000\u024b"+ - "\u024a\u0001\u0000\u0000\u0000\u024c\u024d\u0001\u0000\u0000\u0000\u024d"+ - "\u024b\u0001\u0000\u0000\u0000\u024d\u024e\u0001\u0000\u0000\u0000\u024e"+ - "Y\u0001\u0000\u0000\u0000\u024f\u0251\u0003B\u001c\u0000\u0250\u024f\u0001"+ - "\u0000\u0000\u0000\u0251\u0252\u0001\u0000\u0000\u0000\u0252\u0250\u0001"+ - "\u0000\u0000\u0000\u0252\u0253\u0001\u0000\u0000\u0000\u0253\u0254\u0001"+ - "\u0000\u0000\u0000\u0254\u0258\u0003h/\u0000\u0255\u0257\u0003B\u001c"+ - "\u0000\u0256\u0255\u0001\u0000\u0000\u0000\u0257\u025a\u0001\u0000\u0000"+ - "\u0000\u0258\u0256\u0001\u0000\u0000\u0000\u0258\u0259\u0001\u0000\u0000"+ - "\u0000\u0259\u027a\u0001\u0000\u0000\u0000\u025a\u0258\u0001\u0000\u0000"+ - "\u0000\u025b\u025d\u0003h/\u0000\u025c\u025e\u0003B\u001c\u0000\u025d"+ - "\u025c\u0001\u0000\u0000\u0000\u025e\u025f\u0001\u0000\u0000\u0000\u025f"+ - "\u025d\u0001\u0000\u0000\u0000\u025f\u0260\u0001\u0000\u0000\u0000\u0260"+ - "\u027a\u0001\u0000\u0000\u0000\u0261\u0263\u0003B\u001c\u0000\u0262\u0261"+ - "\u0001\u0000\u0000\u0000\u0263\u0264\u0001\u0000\u0000\u0000\u0264\u0262"+ - "\u0001\u0000\u0000\u0000\u0264\u0265\u0001\u0000\u0000\u0000\u0265\u026d"+ - "\u0001\u0000\u0000\u0000\u0266\u026a\u0003h/\u0000\u0267\u0269\u0003B"+ - "\u001c\u0000\u0268\u0267\u0001\u0000\u0000\u0000\u0269\u026c\u0001\u0000"+ - "\u0000\u0000\u026a\u0268\u0001\u0000\u0000\u0000\u026a\u026b\u0001\u0000"+ - "\u0000\u0000\u026b\u026e\u0001\u0000\u0000\u0000\u026c\u026a\u0001\u0000"+ - "\u0000\u0000\u026d\u0266\u0001\u0000\u0000\u0000\u026d\u026e\u0001\u0000"+ - "\u0000\u0000\u026e\u026f\u0001\u0000\u0000\u0000\u026f\u0270\u0003J \u0000"+ - "\u0270\u027a\u0001\u0000\u0000\u0000\u0271\u0273\u0003h/\u0000\u0272\u0274"+ - "\u0003B\u001c\u0000\u0273\u0272\u0001\u0000\u0000\u0000\u0274\u0275\u0001"+ - "\u0000\u0000\u0000\u0275\u0273\u0001\u0000\u0000\u0000\u0275\u0276\u0001"+ - "\u0000\u0000\u0000\u0276\u0277\u0001\u0000\u0000\u0000\u0277\u0278\u0003"+ - "J \u0000\u0278\u027a\u0001\u0000\u0000\u0000\u0279\u0250\u0001\u0000\u0000"+ - "\u0000\u0279\u025b\u0001\u0000\u0000\u0000\u0279\u0262\u0001\u0000\u0000"+ - "\u0000\u0279\u0271\u0001\u0000\u0000\u0000\u027a[\u0001\u0000\u0000\u0000"+ - "\u027b\u027c\u0005b\u0000\u0000\u027c\u027d\u0005y\u0000\u0000\u027d]"+ - "\u0001\u0000\u0000\u0000\u027e\u027f\u0005a\u0000\u0000\u027f\u0280\u0005"+ - "n\u0000\u0000\u0280\u0281\u0005d\u0000\u0000\u0281_\u0001\u0000\u0000"+ - "\u0000\u0282\u0283\u0005a\u0000\u0000\u0283\u0284\u0005s\u0000\u0000\u0284"+ - "\u0285\u0005c\u0000\u0000\u0285a\u0001\u0000\u0000\u0000\u0286\u0287\u0005"+ - "=\u0000\u0000\u0287c\u0001\u0000\u0000\u0000\u0288\u0289\u0005,\u0000"+ - "\u0000\u0289e\u0001\u0000\u0000\u0000\u028a\u028b\u0005d\u0000\u0000\u028b"+ - 
"\u028c\u0005e\u0000\u0000\u028c\u028d\u0005s\u0000\u0000\u028d\u028e\u0005"+ - "c\u0000\u0000\u028eg\u0001\u0000\u0000\u0000\u028f\u0290\u0005.\u0000"+ - "\u0000\u0290i\u0001\u0000\u0000\u0000\u0291\u0292\u0005f\u0000\u0000\u0292"+ - "\u0293\u0005a\u0000\u0000\u0293\u0294\u0005l\u0000\u0000\u0294\u0295\u0005"+ - "s\u0000\u0000\u0295\u0296\u0005e\u0000\u0000\u0296k\u0001\u0000\u0000"+ - "\u0000\u0297\u0298\u0005f\u0000\u0000\u0298\u0299\u0005i\u0000\u0000\u0299"+ - "\u029a\u0005r\u0000\u0000\u029a\u029b\u0005s\u0000\u0000\u029b\u029c\u0005"+ - "t\u0000\u0000\u029cm\u0001\u0000\u0000\u0000\u029d\u029e\u0005l\u0000"+ - "\u0000\u029e\u029f\u0005a\u0000\u0000\u029f\u02a0\u0005s\u0000\u0000\u02a0"+ - "\u02a1\u0005t\u0000\u0000\u02a1o\u0001\u0000\u0000\u0000\u02a2\u02a3\u0005"+ - "(\u0000\u0000\u02a3q\u0001\u0000\u0000\u0000\u02a4\u02a5\u0005i\u0000"+ - "\u0000\u02a5\u02a6\u0005n\u0000\u0000\u02a6s\u0001\u0000\u0000\u0000\u02a7"+ - "\u02a8\u0005i\u0000\u0000\u02a8\u02a9\u0005s\u0000\u0000\u02a9u\u0001"+ - "\u0000\u0000\u0000\u02aa\u02ab\u0005l\u0000\u0000\u02ab\u02ac\u0005i\u0000"+ - "\u0000\u02ac\u02ad\u0005k\u0000\u0000\u02ad\u02ae\u0005e\u0000\u0000\u02ae"+ - "w\u0001\u0000\u0000\u0000\u02af\u02b0\u0005n\u0000\u0000\u02b0\u02b1\u0005"+ - "o\u0000\u0000\u02b1\u02b2\u0005t\u0000\u0000\u02b2y\u0001\u0000\u0000"+ - "\u0000\u02b3\u02b4\u0005n\u0000\u0000\u02b4\u02b5\u0005u\u0000\u0000\u02b5"+ - "\u02b6\u0005l\u0000\u0000\u02b6\u02b7\u0005l\u0000\u0000\u02b7{\u0001"+ - "\u0000\u0000\u0000\u02b8\u02b9\u0005n\u0000\u0000\u02b9\u02ba\u0005u\u0000"+ - "\u0000\u02ba\u02bb\u0005l\u0000\u0000\u02bb\u02bc\u0005l\u0000\u0000\u02bc"+ - "\u02bd\u0005s\u0000\u0000\u02bd}\u0001\u0000\u0000\u0000\u02be\u02bf\u0005"+ - "o\u0000\u0000\u02bf\u02c0\u0005r\u0000\u0000\u02c0\u007f\u0001\u0000\u0000"+ - "\u0000\u02c1\u02c2\u0005?\u0000\u0000\u02c2\u0081\u0001\u0000\u0000\u0000"+ - "\u02c3\u02c4\u0005r\u0000\u0000\u02c4\u02c5\u0005l\u0000\u0000\u02c5\u02c6"+ - "\u0005i\u0000\u0000\u02c6\u02c7\u0005k\u0000\u0000\u02c7\u02c8\u0005e"+ - "\u0000\u0000\u02c8\u0083\u0001\u0000\u0000\u0000\u02c9\u02ca\u0005)\u0000"+ - "\u0000\u02ca\u0085\u0001\u0000\u0000\u0000\u02cb\u02cc\u0005t\u0000\u0000"+ - "\u02cc\u02cd\u0005r\u0000\u0000\u02cd\u02ce\u0005u\u0000\u0000\u02ce\u02cf"+ - "\u0005e\u0000\u0000\u02cf\u0087\u0001\u0000\u0000\u0000\u02d0\u02d1\u0005"+ - "=\u0000\u0000\u02d1\u02d2\u0005=\u0000\u0000\u02d2\u0089\u0001\u0000\u0000"+ - "\u0000\u02d3\u02d4\u0005!\u0000\u0000\u02d4\u02d5\u0005=\u0000\u0000\u02d5"+ - "\u008b\u0001\u0000\u0000\u0000\u02d6\u02d7\u0005<\u0000\u0000\u02d7\u008d"+ - "\u0001\u0000\u0000\u0000\u02d8\u02d9\u0005<\u0000\u0000\u02d9\u02da\u0005"+ - "=\u0000\u0000\u02da\u008f\u0001\u0000\u0000\u0000\u02db\u02dc\u0005>\u0000"+ - "\u0000\u02dc\u0091\u0001\u0000\u0000\u0000\u02dd\u02de\u0005>\u0000\u0000"+ - "\u02de\u02df\u0005=\u0000\u0000\u02df\u0093\u0001\u0000\u0000\u0000\u02e0"+ - "\u02e1\u0005+\u0000\u0000\u02e1\u0095\u0001\u0000\u0000\u0000\u02e2\u02e3"+ - "\u0005-\u0000\u0000\u02e3\u0097\u0001\u0000\u0000\u0000\u02e4\u02e5\u0005"+ - "*\u0000\u0000\u02e5\u0099\u0001\u0000\u0000\u0000\u02e6\u02e7\u0005/\u0000"+ - "\u0000\u02e7\u009b\u0001\u0000\u0000\u0000\u02e8\u02e9\u0005%\u0000\u0000"+ - "\u02e9\u009d\u0001\u0000\u0000\u0000\u02ea\u02eb\u0005[\u0000\u0000\u02eb"+ - "\u02ec\u0001\u0000\u0000\u0000\u02ec\u02ed\u0006J\u0000\u0000\u02ed\u02ee"+ - "\u0006J\u0000\u0000\u02ee\u009f\u0001\u0000\u0000\u0000\u02ef\u02f0\u0005"+ - "]\u0000\u0000\u02f0\u02f1\u0001\u0000\u0000\u0000\u02f1\u02f2\u0006K\f"+ - 
"\u0000\u02f2\u02f3\u0006K\f\u0000\u02f3\u00a1\u0001\u0000\u0000\u0000"+ - "\u02f4\u02f8\u0003D\u001d\u0000\u02f5\u02f7\u0003T%\u0000\u02f6\u02f5"+ - "\u0001\u0000\u0000\u0000\u02f7\u02fa\u0001\u0000\u0000\u0000\u02f8\u02f6"+ - "\u0001\u0000\u0000\u0000\u02f8\u02f9\u0001\u0000\u0000\u0000\u02f9\u0305"+ - "\u0001\u0000\u0000\u0000\u02fa\u02f8\u0001\u0000\u0000\u0000\u02fb\u02fe"+ - "\u0003R$\u0000\u02fc\u02fe\u0003L!\u0000\u02fd\u02fb\u0001\u0000\u0000"+ - "\u0000\u02fd\u02fc\u0001\u0000\u0000\u0000\u02fe\u0300\u0001\u0000\u0000"+ - "\u0000\u02ff\u0301\u0003T%\u0000\u0300\u02ff\u0001\u0000\u0000\u0000\u0301"+ - "\u0302\u0001\u0000\u0000\u0000\u0302\u0300\u0001\u0000\u0000\u0000\u0302"+ - "\u0303\u0001\u0000\u0000\u0000\u0303\u0305\u0001\u0000\u0000\u0000\u0304"+ - "\u02f4\u0001\u0000\u0000\u0000\u0304\u02fd\u0001\u0000\u0000\u0000\u0305"+ - "\u00a3\u0001\u0000\u0000\u0000\u0306\u0308\u0003N\"\u0000\u0307\u0309"+ - "\u0003P#\u0000\u0308\u0307\u0001\u0000\u0000\u0000\u0309\u030a\u0001\u0000"+ - "\u0000\u0000\u030a\u0308\u0001\u0000\u0000\u0000\u030a\u030b\u0001\u0000"+ - "\u0000\u0000\u030b\u030c\u0001\u0000\u0000\u0000\u030c\u030d\u0003N\""+ - "\u0000\u030d\u00a5\u0001\u0000\u0000\u0000\u030e\u030f\u00030\u0013\u0000"+ - "\u030f\u0310\u0001\u0000\u0000\u0000\u0310\u0311\u0006N\b\u0000\u0311"+ - "\u00a7\u0001\u0000\u0000\u0000\u0312\u0313\u00032\u0014\u0000\u0313\u0314"+ - "\u0001\u0000\u0000\u0000\u0314\u0315\u0006O\b\u0000\u0315\u00a9\u0001"+ - "\u0000\u0000\u0000\u0316\u0317\u00034\u0015\u0000\u0317\u0318\u0001\u0000"+ - "\u0000\u0000\u0318\u0319\u0006P\b\u0000\u0319\u00ab\u0001\u0000\u0000"+ - "\u0000\u031a\u031b\u0003@\u001b\u0000\u031b\u031c\u0001\u0000\u0000\u0000"+ - "\u031c\u031d\u0006Q\u000b\u0000\u031d\u031e\u0006Q\f\u0000\u031e\u00ad"+ - "\u0001\u0000\u0000\u0000\u031f\u0320\u0003\u009eJ\u0000\u0320\u0321\u0001"+ - "\u0000\u0000\u0000\u0321\u0322\u0006R\t\u0000\u0322\u0323\u0006R\u0004"+ - "\u0000\u0323\u0324\u0006R\u0004\u0000\u0324\u00af\u0001\u0000\u0000\u0000"+ - "\u0325\u0326\u0003\u00a0K\u0000\u0326\u0327\u0001\u0000\u0000\u0000\u0327"+ - "\u0328\u0006S\r\u0000\u0328\u0329\u0006S\f\u0000\u0329\u032a\u0006S\f"+ - "\u0000\u032a\u00b1\u0001\u0000\u0000\u0000\u032b\u032c\u0003d-\u0000\u032c"+ - "\u032d\u0001\u0000\u0000\u0000\u032d\u032e\u0006T\u000e\u0000\u032e\u00b3"+ - "\u0001\u0000\u0000\u0000\u032f\u0330\u0003b,\u0000\u0330\u0331\u0001\u0000"+ - "\u0000\u0000\u0331\u0332\u0006U\u000f\u0000\u0332\u00b5\u0001\u0000\u0000"+ - "\u0000\u0333\u0334\u0005m\u0000\u0000\u0334\u0335\u0005e\u0000\u0000\u0335"+ - "\u0336\u0005t\u0000\u0000\u0336\u0337\u0005a\u0000\u0000\u0337\u0338\u0005"+ - "d\u0000\u0000\u0338\u0339\u0005a\u0000\u0000\u0339\u033a\u0005t\u0000"+ - "\u0000\u033a\u033b\u0005a\u0000\u0000\u033b\u00b7\u0001\u0000\u0000\u0000"+ - "\u033c\u0340\b\n\u0000\u0000\u033d\u033e\u0005/\u0000\u0000\u033e\u0340"+ - "\b\u000b\u0000\u0000\u033f\u033c\u0001\u0000\u0000\u0000\u033f\u033d\u0001"+ - "\u0000\u0000\u0000\u0340\u00b9\u0001\u0000\u0000\u0000\u0341\u0343\u0003"+ - "\u00b8W\u0000\u0342\u0341\u0001\u0000\u0000\u0000\u0343\u0344\u0001\u0000"+ - "\u0000\u0000\u0344\u0342\u0001\u0000\u0000\u0000\u0344\u0345\u0001\u0000"+ - "\u0000\u0000\u0345\u00bb\u0001\u0000\u0000\u0000\u0346\u0347\u0003\u00a4"+ - "M\u0000\u0347\u0348\u0001\u0000\u0000\u0000\u0348\u0349\u0006Y\u0010\u0000"+ - "\u0349\u00bd\u0001\u0000\u0000\u0000\u034a\u034b\u00030\u0013\u0000\u034b"+ - "\u034c\u0001\u0000\u0000\u0000\u034c\u034d\u0006Z\b\u0000\u034d\u00bf"+ - 
"\u0001\u0000\u0000\u0000\u034e\u034f\u00032\u0014\u0000\u034f\u0350\u0001"+ - "\u0000\u0000\u0000\u0350\u0351\u0006[\b\u0000\u0351\u00c1\u0001\u0000"+ - "\u0000\u0000\u0352\u0353\u00034\u0015\u0000\u0353\u0354\u0001\u0000\u0000"+ - "\u0000\u0354\u0355\u0006\\\b\u0000\u0355\u00c3\u0001\u0000\u0000\u0000"+ - "\u0356\u0357\u0003@\u001b\u0000\u0357\u0358\u0001\u0000\u0000\u0000\u0358"+ - "\u0359\u0006]\u000b\u0000\u0359\u035a\u0006]\f\u0000\u035a\u00c5\u0001"+ - "\u0000\u0000\u0000\u035b\u035c\u0003h/\u0000\u035c\u035d\u0001\u0000\u0000"+ - "\u0000\u035d\u035e\u0006^\u0011\u0000\u035e\u00c7\u0001\u0000\u0000\u0000"+ - "\u035f\u0360\u0003d-\u0000\u0360\u0361\u0001\u0000\u0000\u0000\u0361\u0362"+ - "\u0006_\u000e\u0000\u0362\u00c9\u0001\u0000\u0000\u0000\u0363\u0368\u0003"+ - "D\u001d\u0000\u0364\u0368\u0003B\u001c\u0000\u0365\u0368\u0003R$\u0000"+ - "\u0366\u0368\u0003\u0098G\u0000\u0367\u0363\u0001\u0000\u0000\u0000\u0367"+ - "\u0364\u0001\u0000\u0000\u0000\u0367\u0365\u0001\u0000\u0000\u0000\u0367"+ - "\u0366\u0001\u0000\u0000\u0000\u0368\u00cb\u0001\u0000\u0000\u0000\u0369"+ - "\u036c\u0003D\u001d\u0000\u036a\u036c\u0003\u0098G\u0000\u036b\u0369\u0001"+ - "\u0000\u0000\u0000\u036b\u036a\u0001\u0000\u0000\u0000\u036c\u0370\u0001"+ - "\u0000\u0000\u0000\u036d\u036f\u0003\u00ca`\u0000\u036e\u036d\u0001\u0000"+ - "\u0000\u0000\u036f\u0372\u0001\u0000\u0000\u0000\u0370\u036e\u0001\u0000"+ - "\u0000\u0000\u0370\u0371\u0001\u0000\u0000\u0000\u0371\u037d\u0001\u0000"+ - "\u0000\u0000\u0372\u0370\u0001\u0000\u0000\u0000\u0373\u0376\u0003R$\u0000"+ - "\u0374\u0376\u0003L!\u0000\u0375\u0373\u0001\u0000\u0000\u0000\u0375\u0374"+ - "\u0001\u0000\u0000\u0000\u0376\u0378\u0001\u0000\u0000\u0000\u0377\u0379"+ - "\u0003\u00ca`\u0000\u0378\u0377\u0001\u0000\u0000\u0000\u0379\u037a\u0001"+ - "\u0000\u0000\u0000\u037a\u0378\u0001\u0000\u0000\u0000\u037a\u037b\u0001"+ - "\u0000\u0000\u0000\u037b\u037d\u0001\u0000\u0000\u0000\u037c\u036b\u0001"+ - "\u0000\u0000\u0000\u037c\u0375\u0001\u0000\u0000\u0000\u037d\u00cd\u0001"+ - "\u0000\u0000\u0000\u037e\u037f\u0003\u00a4M\u0000\u037f\u0380\u0001\u0000"+ - "\u0000\u0000\u0380\u0381\u0006b\u0010\u0000\u0381\u00cf\u0001\u0000\u0000"+ - "\u0000\u0382\u0383\u00030\u0013\u0000\u0383\u0384\u0001\u0000\u0000\u0000"+ - "\u0384\u0385\u0006c\b\u0000\u0385\u00d1\u0001\u0000\u0000\u0000\u0386"+ - "\u0387\u00032\u0014\u0000\u0387\u0388\u0001\u0000\u0000\u0000\u0388\u0389"+ - "\u0006d\b\u0000\u0389\u00d3\u0001\u0000\u0000\u0000\u038a\u038b\u0003"+ - "4\u0015\u0000\u038b\u038c\u0001\u0000\u0000\u0000\u038c\u038d\u0006e\b"+ - "\u0000\u038d\u00d5\u0001\u0000\u0000\u0000\u038e\u038f\u0003@\u001b\u0000"+ - "\u038f\u0390\u0001\u0000\u0000\u0000\u0390\u0391\u0006f\u000b\u0000\u0391"+ - "\u0392\u0006f\f\u0000\u0392\u00d7\u0001\u0000\u0000\u0000\u0393\u0394"+ - "\u0003b,\u0000\u0394\u0395\u0001\u0000\u0000\u0000\u0395\u0396\u0006g"+ - "\u000f\u0000\u0396\u00d9\u0001\u0000\u0000\u0000\u0397\u0398\u0003d-\u0000"+ - "\u0398\u0399\u0001\u0000\u0000\u0000\u0399\u039a\u0006h\u000e\u0000\u039a"+ - "\u00db\u0001\u0000\u0000\u0000\u039b\u039c\u0003h/\u0000\u039c\u039d\u0001"+ - "\u0000\u0000\u0000\u039d\u039e\u0006i\u0011\u0000\u039e\u00dd\u0001\u0000"+ - "\u0000\u0000\u039f\u03a0\u0005a\u0000\u0000\u03a0\u03a1\u0005s\u0000\u0000"+ - "\u03a1\u00df\u0001\u0000\u0000\u0000\u03a2\u03a3\u0003\u00a4M\u0000\u03a3"+ - "\u03a4\u0001\u0000\u0000\u0000\u03a4\u03a5\u0006k\u0010\u0000\u03a5\u00e1"+ - "\u0001\u0000\u0000\u0000\u03a6\u03a7\u0003\u00cca\u0000\u03a7\u03a8\u0001"+ - 
"\u0000\u0000\u0000\u03a8\u03a9\u0006l\u0012\u0000\u03a9\u00e3\u0001\u0000"+ - "\u0000\u0000\u03aa\u03ab\u00030\u0013\u0000\u03ab\u03ac\u0001\u0000\u0000"+ - "\u0000\u03ac\u03ad\u0006m\b\u0000\u03ad\u00e5\u0001\u0000\u0000\u0000"+ - "\u03ae\u03af\u00032\u0014\u0000\u03af\u03b0\u0001\u0000\u0000\u0000\u03b0"+ - "\u03b1\u0006n\b\u0000\u03b1\u00e7\u0001\u0000\u0000\u0000\u03b2\u03b3"+ - "\u00034\u0015\u0000\u03b3\u03b4\u0001\u0000\u0000\u0000\u03b4\u03b5\u0006"+ - "o\b\u0000\u03b5\u00e9\u0001\u0000\u0000\u0000\u03b6\u03b7\u0003@\u001b"+ - "\u0000\u03b7\u03b8\u0001\u0000\u0000\u0000\u03b8\u03b9\u0006p\u000b\u0000"+ - "\u03b9\u03ba\u0006p\f\u0000\u03ba\u00eb\u0001\u0000\u0000\u0000\u03bb"+ - "\u03bc\u0005o\u0000\u0000\u03bc\u03bd\u0005n\u0000\u0000\u03bd\u03be\u0001"+ - "\u0000\u0000\u0000\u03be\u03bf\u0006q\u0013\u0000\u03bf\u00ed\u0001\u0000"+ - "\u0000\u0000\u03c0\u03c1\u0005w\u0000\u0000\u03c1\u03c2\u0005i\u0000\u0000"+ - "\u03c2\u03c3\u0005t\u0000\u0000\u03c3\u03c4\u0005h\u0000\u0000\u03c4\u03c5"+ - "\u0001\u0000\u0000\u0000\u03c5\u03c6\u0006r\u0013\u0000\u03c6\u00ef\u0001"+ - "\u0000\u0000\u0000\u03c7\u03c8\u0003\u00baX\u0000\u03c8\u03c9\u0001\u0000"+ - "\u0000\u0000\u03c9\u03ca\u0006s\u0014\u0000\u03ca\u00f1\u0001\u0000\u0000"+ - "\u0000\u03cb\u03cc\u0003\u00a4M\u0000\u03cc\u03cd\u0001\u0000\u0000\u0000"+ - "\u03cd\u03ce\u0006t\u0010\u0000\u03ce\u00f3\u0001\u0000\u0000\u0000\u03cf"+ - "\u03d0\u00030\u0013\u0000\u03d0\u03d1\u0001\u0000\u0000\u0000\u03d1\u03d2"+ - "\u0006u\b\u0000\u03d2\u00f5\u0001\u0000\u0000\u0000\u03d3\u03d4\u0003"+ - "2\u0014\u0000\u03d4\u03d5\u0001\u0000\u0000\u0000\u03d5\u03d6\u0006v\b"+ - "\u0000\u03d6\u00f7\u0001\u0000\u0000\u0000\u03d7\u03d8\u00034\u0015\u0000"+ - "\u03d8\u03d9\u0001\u0000\u0000\u0000\u03d9\u03da\u0006w\b\u0000\u03da"+ - "\u00f9\u0001\u0000\u0000\u0000\u03db\u03dc\u0003@\u001b\u0000\u03dc\u03dd"+ - "\u0001\u0000\u0000\u0000\u03dd\u03de\u0006x\u000b\u0000\u03de\u03df\u0006"+ - "x\f\u0000\u03df\u03e0\u0006x\f\u0000\u03e0\u00fb\u0001\u0000\u0000\u0000"+ - "\u03e1\u03e2\u0003b,\u0000\u03e2\u03e3\u0001\u0000\u0000\u0000\u03e3\u03e4"+ - "\u0006y\u000f\u0000\u03e4\u00fd\u0001\u0000\u0000\u0000\u03e5\u03e6\u0003"+ - "d-\u0000\u03e6\u03e7\u0001\u0000\u0000\u0000\u03e7\u03e8\u0006z\u000e"+ - "\u0000\u03e8\u00ff\u0001\u0000\u0000\u0000\u03e9\u03ea\u0003h/\u0000\u03ea"+ - "\u03eb\u0001\u0000\u0000\u0000\u03eb\u03ec\u0006{\u0011\u0000\u03ec\u0101"+ - "\u0001\u0000\u0000\u0000\u03ed\u03ee\u0003\u00eer\u0000\u03ee\u03ef\u0001"+ - "\u0000\u0000\u0000\u03ef\u03f0\u0006|\u0015\u0000\u03f0\u0103\u0001\u0000"+ - "\u0000\u0000\u03f1\u03f2\u0003\u00cca\u0000\u03f2\u03f3\u0001\u0000\u0000"+ - "\u0000\u03f3\u03f4\u0006}\u0012\u0000\u03f4\u0105\u0001\u0000\u0000\u0000"+ - "\u03f5\u03f6\u0003\u00a4M\u0000\u03f6\u03f7\u0001\u0000\u0000\u0000\u03f7"+ - "\u03f8\u0006~\u0010\u0000\u03f8\u0107\u0001\u0000\u0000\u0000\u03f9\u03fa"+ - "\u00030\u0013\u0000\u03fa\u03fb\u0001\u0000\u0000\u0000\u03fb\u03fc\u0006"+ - "\u007f\b\u0000\u03fc\u0109\u0001\u0000\u0000\u0000\u03fd\u03fe\u00032"+ - "\u0014\u0000\u03fe\u03ff\u0001\u0000\u0000\u0000\u03ff\u0400\u0006\u0080"+ - "\b\u0000\u0400\u010b\u0001\u0000\u0000\u0000\u0401\u0402\u00034\u0015"+ - "\u0000\u0402\u0403\u0001\u0000\u0000\u0000\u0403\u0404\u0006\u0081\b\u0000"+ - "\u0404\u010d\u0001\u0000\u0000\u0000\u0405\u0406\u0003@\u001b\u0000\u0406"+ - "\u0407\u0001\u0000\u0000\u0000\u0407\u0408\u0006\u0082\u000b\u0000\u0408"+ - "\u0409\u0006\u0082\f\u0000\u0409\u010f\u0001\u0000\u0000\u0000\u040a\u040b"+ - 
"\u0003h/\u0000\u040b\u040c\u0001\u0000\u0000\u0000\u040c\u040d\u0006\u0083"+ - "\u0011\u0000\u040d\u0111\u0001\u0000\u0000\u0000\u040e\u040f\u0003\u00a4"+ - "M\u0000\u040f\u0410\u0001\u0000\u0000\u0000\u0410\u0411\u0006\u0084\u0010"+ - "\u0000\u0411\u0113\u0001\u0000\u0000\u0000\u0412\u0413\u0003\u00a2L\u0000"+ - "\u0413\u0414\u0001\u0000\u0000\u0000\u0414\u0415\u0006\u0085\u0016\u0000"+ - "\u0415\u0115\u0001\u0000\u0000\u0000\u0416\u0417\u00030\u0013\u0000\u0417"+ - "\u0418\u0001\u0000\u0000\u0000\u0418\u0419\u0006\u0086\b\u0000\u0419\u0117"+ - "\u0001\u0000\u0000\u0000\u041a\u041b\u00032\u0014\u0000\u041b\u041c\u0001"+ - "\u0000\u0000\u0000\u041c\u041d\u0006\u0087\b\u0000\u041d\u0119\u0001\u0000"+ - "\u0000\u0000\u041e\u041f\u00034\u0015\u0000\u041f\u0420\u0001\u0000\u0000"+ - "\u0000\u0420\u0421\u0006\u0088\b\u0000\u0421\u011b\u0001\u0000\u0000\u0000"+ - "\u0422\u0423\u0003@\u001b\u0000\u0423\u0424\u0001\u0000\u0000\u0000\u0424"+ - "\u0425\u0006\u0089\u000b\u0000\u0425\u0426\u0006\u0089\f\u0000\u0426\u011d"+ - "\u0001\u0000\u0000\u0000\u0427\u0428\u0005i\u0000\u0000\u0428\u0429\u0005"+ - "n\u0000\u0000\u0429\u042a\u0005f\u0000\u0000\u042a\u042b\u0005o\u0000"+ - "\u0000\u042b\u011f\u0001\u0000\u0000\u0000\u042c\u042d\u0005f\u0000\u0000"+ - "\u042d\u042e\u0005u\u0000\u0000\u042e\u042f\u0005n\u0000\u0000\u042f\u0430"+ - "\u0005c\u0000\u0000\u0430\u0431\u0005t\u0000\u0000\u0431\u0432\u0005i"+ - "\u0000\u0000\u0432\u0433\u0005o\u0000\u0000\u0433\u0434\u0005n\u0000\u0000"+ - "\u0434\u0435\u0005s\u0000\u0000\u0435\u0121\u0001\u0000\u0000\u0000\u0436"+ - "\u0437\u00030\u0013\u0000\u0437\u0438\u0001\u0000\u0000\u0000\u0438\u0439"+ - "\u0006\u008c\b\u0000\u0439\u0123\u0001\u0000\u0000\u0000\u043a\u043b\u0003"+ - "2\u0014\u0000\u043b\u043c\u0001\u0000\u0000\u0000\u043c\u043d\u0006\u008d"+ - "\b\u0000\u043d\u0125\u0001\u0000\u0000\u0000\u043e\u043f\u00034\u0015"+ - "\u0000\u043f\u0440\u0001\u0000\u0000\u0000\u0440\u0441\u0006\u008e\b\u0000"+ - "\u0441\u0127\u0001\u0000\u0000\u00001\u0000\u0001\u0002\u0003\u0004\u0005"+ - "\u0006\u0007\b\t\u01c4\u01ce\u01d2\u01d5\u01de\u01e0\u01eb\u0214\u0219"+ - "\u0222\u0229\u022e\u0230\u023b\u0243\u0246\u0248\u024d\u0252\u0258\u025f"+ - "\u0264\u026a\u026d\u0275\u0279\u02f8\u02fd\u0302\u0304\u030a\u033f\u0344"+ - "\u0367\u036b\u0370\u0375\u037a\u037c\u0017\u0005\u0002\u0000\u0005\u0004"+ - "\u0000\u0005\u0006\u0000\u0005\u0001\u0000\u0005\u0003\u0000\u0005\b\u0000"+ - "\u0005\u0005\u0000\u0005\t\u0000\u0000\u0001\u0000\u0007?\u0000\u0005"+ - "\u0000\u0000\u0007\u001a\u0000\u0004\u0000\u0000\u0007@\u0000\u0007\""+ - "\u0000\u0007!\u0000\u0007B\u0000\u0007$\u0000\u0007K\u0000\u0005\u0007"+ - "\u0000\u0007G\u0000\u0007T\u0000\u0007A\u0000"; + "\u0089\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u008a\u0001\u008a\u0001"+ + "\u008a\u0001\u008a\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001"+ + "\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001\u008d\u0001"+ + "\u008d\u0001\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001"+ + "\u008e\u0001\u008e\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001"+ + "\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001"+ + "\u0090\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0091\u0001\u0091\u0001"+ + "\u0091\u0001\u0091\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001"+ + "\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0094\u0001"+ + "\u0094\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0004"+ + 
"\u0095\u0475\b\u0095\u000b\u0095\f\u0095\u0476\u0001\u0096\u0001\u0096"+ + "\u0001\u0096\u0001\u0096\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0097"+ + "\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0098\u0002\u01f5\u0250\u0000"+ + "\u0099\u000b\u0001\r\u0002\u000f\u0003\u0011\u0004\u0013\u0005\u0015\u0006"+ + "\u0017\u0007\u0019\b\u001b\t\u001d\n\u001f\u000b!\f#\r%\u000e\'\u000f"+ + ")\u0010+\u0011-\u0012/\u00131\u00143\u00155\u00167\u00009\u0000;\u0017"+ + "=\u0018?\u0019A\u001aC\u0000E\u0000G\u0000I\u0000K\u0000M\u0000O\u0000"+ + "Q\u0000S\u0000U\u0000W\u001bY\u001c[\u001d]\u001e_\u001fa c!e\"g#i$k%"+ + "m&o\'q(s)u*w+y,{-}.\u007f/\u00810\u00831\u00852\u00873\u00894\u008b5\u008d"+ + "6\u008f7\u00918\u00939\u0095:\u0097;\u0099<\u009b=\u009d>\u009f?\u00a1"+ + "@\u00a3A\u00a5B\u00a7C\u00a9D\u00abE\u00ad\u0000\u00af\u0000\u00b1\u0000"+ + "\u00b3\u0000\u00b5\u0000\u00b7F\u00b9\u0000\u00bbG\u00bd\u0000\u00bfH"+ + "\u00c1I\u00c3J\u00c5\u0000\u00c7\u0000\u00c9\u0000\u00cb\u0000\u00cdK"+ + "\u00cf\u0000\u00d1\u0000\u00d3L\u00d5M\u00d7N\u00d9\u0000\u00db\u0000"+ + "\u00dd\u0000\u00df\u0000\u00e1O\u00e3\u0000\u00e5\u0000\u00e7P\u00e9Q"+ + "\u00ebR\u00ed\u0000\u00ef\u0000\u00f1S\u00f3T\u00f5\u0000\u00f7U\u00f9"+ + "\u0000\u00fb\u0000\u00fdV\u00ffW\u0101X\u0103\u0000\u0105\u0000\u0107"+ + "\u0000\u0109\u0000\u010b\u0000\u010d\u0000\u010f\u0000\u0111Y\u0113Z\u0115"+ + "[\u0117\u0000\u0119\u0000\u011b\u0000\u011d\u0000\u011f\\\u0121]\u0123"+ + "^\u0125\u0000\u0127_\u0129`\u012ba\u012db\u012fc\u0131\u0000\u0133d\u0135"+ + "e\u0137f\u0139g\u013bh\u000b\u0000\u0001\u0002\u0003\u0004\u0005\u0006"+ + "\u0007\b\t\n\r\u0006\u0000\t\n\r\r //[[]]\u0002\u0000\n\n\r\r\u0003\u0000"+ + "\t\n\r\r \u0001\u000009\u0002\u0000AZaz\u0005\u0000\"\"\\\\nnrrtt\u0004"+ + "\u0000\n\n\r\r\"\"\\\\\u0002\u0000EEee\u0002\u0000++--\u0001\u0000``\n"+ + "\u0000\t\n\r\r ,,//==[[]]``||\u0002\u0000**//\u000b\u0000\t\n\r\r \""+ + "#,,//::<<>?\\\\||\u049f\u0000\u000b\u0001\u0000\u0000\u0000\u0000\r\u0001"+ + "\u0000\u0000\u0000\u0000\u000f\u0001\u0000\u0000\u0000\u0000\u0011\u0001"+ + "\u0000\u0000\u0000\u0000\u0013\u0001\u0000\u0000\u0000\u0000\u0015\u0001"+ + "\u0000\u0000\u0000\u0000\u0017\u0001\u0000\u0000\u0000\u0000\u0019\u0001"+ + "\u0000\u0000\u0000\u0000\u001b\u0001\u0000\u0000\u0000\u0000\u001d\u0001"+ + "\u0000\u0000\u0000\u0000\u001f\u0001\u0000\u0000\u0000\u0000!\u0001\u0000"+ + "\u0000\u0000\u0000#\u0001\u0000\u0000\u0000\u0000%\u0001\u0000\u0000\u0000"+ + "\u0000\'\u0001\u0000\u0000\u0000\u0000)\u0001\u0000\u0000\u0000\u0000"+ + "+\u0001\u0000\u0000\u0000\u0000-\u0001\u0000\u0000\u0000\u0000/\u0001"+ + "\u0000\u0000\u0000\u00001\u0001\u0000\u0000\u0000\u00003\u0001\u0000\u0000"+ + "\u0000\u00005\u0001\u0000\u0000\u0000\u00017\u0001\u0000\u0000\u0000\u0001"+ + "9\u0001\u0000\u0000\u0000\u0001;\u0001\u0000\u0000\u0000\u0001=\u0001"+ + "\u0000\u0000\u0000\u0001?\u0001\u0000\u0000\u0000\u0002A\u0001\u0000\u0000"+ + "\u0000\u0002W\u0001\u0000\u0000\u0000\u0002Y\u0001\u0000\u0000\u0000\u0002"+ + "[\u0001\u0000\u0000\u0000\u0002]\u0001\u0000\u0000\u0000\u0002_\u0001"+ + "\u0000\u0000\u0000\u0002a\u0001\u0000\u0000\u0000\u0002c\u0001\u0000\u0000"+ + "\u0000\u0002e\u0001\u0000\u0000\u0000\u0002g\u0001\u0000\u0000\u0000\u0002"+ + "i\u0001\u0000\u0000\u0000\u0002k\u0001\u0000\u0000\u0000\u0002m\u0001"+ + "\u0000\u0000\u0000\u0002o\u0001\u0000\u0000\u0000\u0002q\u0001\u0000\u0000"+ + "\u0000\u0002s\u0001\u0000\u0000\u0000\u0002u\u0001\u0000\u0000\u0000\u0002"+ + "w\u0001\u0000\u0000\u0000\u0002y\u0001\u0000\u0000\u0000\u0002{\u0001"+ 
+ "\u0000\u0000\u0000\u0002}\u0001\u0000\u0000\u0000\u0002\u007f\u0001\u0000"+ + "\u0000\u0000\u0002\u0081\u0001\u0000\u0000\u0000\u0002\u0083\u0001\u0000"+ + "\u0000\u0000\u0002\u0085\u0001\u0000\u0000\u0000\u0002\u0087\u0001\u0000"+ + "\u0000\u0000\u0002\u0089\u0001\u0000\u0000\u0000\u0002\u008b\u0001\u0000"+ + "\u0000\u0000\u0002\u008d\u0001\u0000\u0000\u0000\u0002\u008f\u0001\u0000"+ + "\u0000\u0000\u0002\u0091\u0001\u0000\u0000\u0000\u0002\u0093\u0001\u0000"+ + "\u0000\u0000\u0002\u0095\u0001\u0000\u0000\u0000\u0002\u0097\u0001\u0000"+ + "\u0000\u0000\u0002\u0099\u0001\u0000\u0000\u0000\u0002\u009b\u0001\u0000"+ + "\u0000\u0000\u0002\u009d\u0001\u0000\u0000\u0000\u0002\u009f\u0001\u0000"+ + "\u0000\u0000\u0002\u00a1\u0001\u0000\u0000\u0000\u0002\u00a3\u0001\u0000"+ + "\u0000\u0000\u0002\u00a5\u0001\u0000\u0000\u0000\u0002\u00a7\u0001\u0000"+ + "\u0000\u0000\u0002\u00a9\u0001\u0000\u0000\u0000\u0002\u00ab\u0001\u0000"+ + "\u0000\u0000\u0003\u00ad\u0001\u0000\u0000\u0000\u0003\u00af\u0001\u0000"+ + "\u0000\u0000\u0003\u00b1\u0001\u0000\u0000\u0000\u0003\u00b3\u0001\u0000"+ + "\u0000\u0000\u0003\u00b5\u0001\u0000\u0000\u0000\u0003\u00b7\u0001\u0000"+ + "\u0000\u0000\u0003\u00bb\u0001\u0000\u0000\u0000\u0003\u00bd\u0001\u0000"+ + "\u0000\u0000\u0003\u00bf\u0001\u0000\u0000\u0000\u0003\u00c1\u0001\u0000"+ + "\u0000\u0000\u0003\u00c3\u0001\u0000\u0000\u0000\u0004\u00c5\u0001\u0000"+ + "\u0000\u0000\u0004\u00c7\u0001\u0000\u0000\u0000\u0004\u00c9\u0001\u0000"+ + "\u0000\u0000\u0004\u00cd\u0001\u0000\u0000\u0000\u0004\u00cf\u0001\u0000"+ + "\u0000\u0000\u0004\u00d1\u0001\u0000\u0000\u0000\u0004\u00d3\u0001\u0000"+ + "\u0000\u0000\u0004\u00d5\u0001\u0000\u0000\u0000\u0004\u00d7\u0001\u0000"+ + "\u0000\u0000\u0005\u00d9\u0001\u0000\u0000\u0000\u0005\u00db\u0001\u0000"+ + "\u0000\u0000\u0005\u00dd\u0001\u0000\u0000\u0000\u0005\u00df\u0001\u0000"+ + "\u0000\u0000\u0005\u00e1\u0001\u0000\u0000\u0000\u0005\u00e3\u0001\u0000"+ + "\u0000\u0000\u0005\u00e5\u0001\u0000\u0000\u0000\u0005\u00e7\u0001\u0000"+ + "\u0000\u0000\u0005\u00e9\u0001\u0000\u0000\u0000\u0005\u00eb\u0001\u0000"+ + "\u0000\u0000\u0006\u00ed\u0001\u0000\u0000\u0000\u0006\u00ef\u0001\u0000"+ + "\u0000\u0000\u0006\u00f1\u0001\u0000\u0000\u0000\u0006\u00f3\u0001\u0000"+ + "\u0000\u0000\u0006\u00f7\u0001\u0000\u0000\u0000\u0006\u00f9\u0001\u0000"+ + "\u0000\u0000\u0006\u00fb\u0001\u0000\u0000\u0000\u0006\u00fd\u0001\u0000"+ + "\u0000\u0000\u0006\u00ff\u0001\u0000\u0000\u0000\u0006\u0101\u0001\u0000"+ + "\u0000\u0000\u0007\u0103\u0001\u0000\u0000\u0000\u0007\u0105\u0001\u0000"+ + "\u0000\u0000\u0007\u0107\u0001\u0000\u0000\u0000\u0007\u0109\u0001\u0000"+ + "\u0000\u0000\u0007\u010b\u0001\u0000\u0000\u0000\u0007\u010d\u0001\u0000"+ + "\u0000\u0000\u0007\u010f\u0001\u0000\u0000\u0000\u0007\u0111\u0001\u0000"+ + "\u0000\u0000\u0007\u0113\u0001\u0000\u0000\u0000\u0007\u0115\u0001\u0000"+ + "\u0000\u0000\b\u0117\u0001\u0000\u0000\u0000\b\u0119\u0001\u0000\u0000"+ + "\u0000\b\u011b\u0001\u0000\u0000\u0000\b\u011d\u0001\u0000\u0000\u0000"+ + "\b\u011f\u0001\u0000\u0000\u0000\b\u0121\u0001\u0000\u0000\u0000\b\u0123"+ + "\u0001\u0000\u0000\u0000\t\u0125\u0001\u0000\u0000\u0000\t\u0127\u0001"+ + "\u0000\u0000\u0000\t\u0129\u0001\u0000\u0000\u0000\t\u012b\u0001\u0000"+ + "\u0000\u0000\t\u012d\u0001\u0000\u0000\u0000\t\u012f\u0001\u0000\u0000"+ + "\u0000\n\u0131\u0001\u0000\u0000\u0000\n\u0133\u0001\u0000\u0000\u0000"+ + "\n\u0135\u0001\u0000\u0000\u0000\n\u0137\u0001\u0000\u0000\u0000\n\u0139"+ + 
"\u0001\u0000\u0000\u0000\n\u013b\u0001\u0000\u0000\u0000\u000b\u013d\u0001"+ + "\u0000\u0000\u0000\r\u0147\u0001\u0000\u0000\u0000\u000f\u014e\u0001\u0000"+ + "\u0000\u0000\u0011\u0157\u0001\u0000\u0000\u0000\u0013\u015e\u0001\u0000"+ + "\u0000\u0000\u0015\u0168\u0001\u0000\u0000\u0000\u0017\u016f\u0001\u0000"+ + "\u0000\u0000\u0019\u0176\u0001\u0000\u0000\u0000\u001b\u0184\u0001\u0000"+ + "\u0000\u0000\u001d\u018b\u0001\u0000\u0000\u0000\u001f\u0193\u0001\u0000"+ + "\u0000\u0000!\u019f\u0001\u0000\u0000\u0000#\u01a9\u0001\u0000\u0000\u0000"+ + "%\u01b2\u0001\u0000\u0000\u0000\'\u01b8\u0001\u0000\u0000\u0000)\u01bf"+ + "\u0001\u0000\u0000\u0000+\u01c6\u0001\u0000\u0000\u0000-\u01ce\u0001\u0000"+ + "\u0000\u0000/\u01d7\u0001\u0000\u0000\u00001\u01dd\u0001\u0000\u0000\u0000"+ + "3\u01ee\u0001\u0000\u0000\u00005\u01fe\u0001\u0000\u0000\u00007\u0204"+ + "\u0001\u0000\u0000\u00009\u0209\u0001\u0000\u0000\u0000;\u020e\u0001\u0000"+ + "\u0000\u0000=\u0212\u0001\u0000\u0000\u0000?\u0216\u0001\u0000\u0000\u0000"+ + "A\u021a\u0001\u0000\u0000\u0000C\u021e\u0001\u0000\u0000\u0000E\u0220"+ + "\u0001\u0000\u0000\u0000G\u0222\u0001\u0000\u0000\u0000I\u0225\u0001\u0000"+ + "\u0000\u0000K\u0227\u0001\u0000\u0000\u0000M\u0230\u0001\u0000\u0000\u0000"+ + "O\u0232\u0001\u0000\u0000\u0000Q\u0237\u0001\u0000\u0000\u0000S\u0239"+ + "\u0001\u0000\u0000\u0000U\u023e\u0001\u0000\u0000\u0000W\u025d\u0001\u0000"+ + "\u0000\u0000Y\u0260\u0001\u0000\u0000\u0000[\u028e\u0001\u0000\u0000\u0000"+ + "]\u0290\u0001\u0000\u0000\u0000_\u0293\u0001\u0000\u0000\u0000a\u0297"+ + "\u0001\u0000\u0000\u0000c\u029b\u0001\u0000\u0000\u0000e\u029d\u0001\u0000"+ + "\u0000\u0000g\u029f\u0001\u0000\u0000\u0000i\u02a4\u0001\u0000\u0000\u0000"+ + "k\u02a6\u0001\u0000\u0000\u0000m\u02ac\u0001\u0000\u0000\u0000o\u02b2"+ + "\u0001\u0000\u0000\u0000q\u02b7\u0001\u0000\u0000\u0000s\u02b9\u0001\u0000"+ + "\u0000\u0000u\u02bc\u0001\u0000\u0000\u0000w\u02bf\u0001\u0000\u0000\u0000"+ + "y\u02c4\u0001\u0000\u0000\u0000{\u02c8\u0001\u0000\u0000\u0000}\u02cd"+ + "\u0001\u0000\u0000\u0000\u007f\u02d3\u0001\u0000\u0000\u0000\u0081\u02d6"+ + "\u0001\u0000\u0000\u0000\u0083\u02d8\u0001\u0000\u0000\u0000\u0085\u02de"+ + "\u0001\u0000\u0000\u0000\u0087\u02e0\u0001\u0000\u0000\u0000\u0089\u02e5"+ + "\u0001\u0000\u0000\u0000\u008b\u02e8\u0001\u0000\u0000\u0000\u008d\u02eb"+ + "\u0001\u0000\u0000\u0000\u008f\u02ed\u0001\u0000\u0000\u0000\u0091\u02f0"+ + "\u0001\u0000\u0000\u0000\u0093\u02f2\u0001\u0000\u0000\u0000\u0095\u02f5"+ + "\u0001\u0000\u0000\u0000\u0097\u02f7\u0001\u0000\u0000\u0000\u0099\u02f9"+ + "\u0001\u0000\u0000\u0000\u009b\u02fb\u0001\u0000\u0000\u0000\u009d\u02fd"+ + "\u0001\u0000\u0000\u0000\u009f\u02ff\u0001\u0000\u0000\u0000\u00a1\u0304"+ + "\u0001\u0000\u0000\u0000\u00a3\u0319\u0001\u0000\u0000\u0000\u00a5\u031b"+ + "\u0001\u0000\u0000\u0000\u00a7\u0323\u0001\u0000\u0000\u0000\u00a9\u0327"+ + "\u0001\u0000\u0000\u0000\u00ab\u032b\u0001\u0000\u0000\u0000\u00ad\u032f"+ + "\u0001\u0000\u0000\u0000\u00af\u0334\u0001\u0000\u0000\u0000\u00b1\u0338"+ + "\u0001\u0000\u0000\u0000\u00b3\u033c\u0001\u0000\u0000\u0000\u00b5\u0340"+ + "\u0001\u0000\u0000\u0000\u00b7\u0344\u0001\u0000\u0000\u0000\u00b9\u0350"+ + "\u0001\u0000\u0000\u0000\u00bb\u0353\u0001\u0000\u0000\u0000\u00bd\u0357"+ + "\u0001\u0000\u0000\u0000\u00bf\u035b\u0001\u0000\u0000\u0000\u00c1\u035f"+ + "\u0001\u0000\u0000\u0000\u00c3\u0363\u0001\u0000\u0000\u0000\u00c5\u0367"+ + "\u0001\u0000\u0000\u0000\u00c7\u036c\u0001\u0000\u0000\u0000\u00c9\u0370"+ + 
"\u0001\u0000\u0000\u0000\u00cb\u0378\u0001\u0000\u0000\u0000\u00cd\u038d"+ + "\u0001\u0000\u0000\u0000\u00cf\u038f\u0001\u0000\u0000\u0000\u00d1\u0393"+ + "\u0001\u0000\u0000\u0000\u00d3\u0397\u0001\u0000\u0000\u0000\u00d5\u039b"+ + "\u0001\u0000\u0000\u0000\u00d7\u039f\u0001\u0000\u0000\u0000\u00d9\u03a3"+ + "\u0001\u0000\u0000\u0000\u00db\u03a8\u0001\u0000\u0000\u0000\u00dd\u03ac"+ + "\u0001\u0000\u0000\u0000\u00df\u03b0\u0001\u0000\u0000\u0000\u00e1\u03b4"+ + "\u0001\u0000\u0000\u0000\u00e3\u03b7\u0001\u0000\u0000\u0000\u00e5\u03bb"+ + "\u0001\u0000\u0000\u0000\u00e7\u03bf\u0001\u0000\u0000\u0000\u00e9\u03c3"+ + "\u0001\u0000\u0000\u0000\u00eb\u03c7\u0001\u0000\u0000\u0000\u00ed\u03cb"+ + "\u0001\u0000\u0000\u0000\u00ef\u03d0\u0001\u0000\u0000\u0000\u00f1\u03d5"+ + "\u0001\u0000\u0000\u0000\u00f3\u03da\u0001\u0000\u0000\u0000\u00f5\u03e1"+ + "\u0001\u0000\u0000\u0000\u00f7\u03e5\u0001\u0000\u0000\u0000\u00f9\u03ed"+ + "\u0001\u0000\u0000\u0000\u00fb\u03f1\u0001\u0000\u0000\u0000\u00fd\u03f5"+ + "\u0001\u0000\u0000\u0000\u00ff\u03f9\u0001\u0000\u0000\u0000\u0101\u03fd"+ + "\u0001\u0000\u0000\u0000\u0103\u0401\u0001\u0000\u0000\u0000\u0105\u0407"+ + "\u0001\u0000\u0000\u0000\u0107\u040b\u0001\u0000\u0000\u0000\u0109\u040f"+ + "\u0001\u0000\u0000\u0000\u010b\u0413\u0001\u0000\u0000\u0000\u010d\u0417"+ + "\u0001\u0000\u0000\u0000\u010f\u041b\u0001\u0000\u0000\u0000\u0111\u041f"+ + "\u0001\u0000\u0000\u0000\u0113\u0423\u0001\u0000\u0000\u0000\u0115\u0427"+ + "\u0001\u0000\u0000\u0000\u0117\u042b\u0001\u0000\u0000\u0000\u0119\u0430"+ + "\u0001\u0000\u0000\u0000\u011b\u0434\u0001\u0000\u0000\u0000\u011d\u0438"+ + "\u0001\u0000\u0000\u0000\u011f\u043c\u0001\u0000\u0000\u0000\u0121\u0440"+ + "\u0001\u0000\u0000\u0000\u0123\u0444\u0001\u0000\u0000\u0000\u0125\u0448"+ + "\u0001\u0000\u0000\u0000\u0127\u044d\u0001\u0000\u0000\u0000\u0129\u0452"+ + "\u0001\u0000\u0000\u0000\u012b\u045c\u0001\u0000\u0000\u0000\u012d\u0460"+ + "\u0001\u0000\u0000\u0000\u012f\u0464\u0001\u0000\u0000\u0000\u0131\u0468"+ + "\u0001\u0000\u0000\u0000\u0133\u046d\u0001\u0000\u0000\u0000\u0135\u0474"+ + "\u0001\u0000\u0000\u0000\u0137\u0478\u0001\u0000\u0000\u0000\u0139\u047c"+ + "\u0001\u0000\u0000\u0000\u013b\u0480\u0001\u0000\u0000\u0000\u013d\u013e"+ + "\u0005d\u0000\u0000\u013e\u013f\u0005i\u0000\u0000\u013f\u0140\u0005s"+ + "\u0000\u0000\u0140\u0141\u0005s\u0000\u0000\u0141\u0142\u0005e\u0000\u0000"+ + "\u0142\u0143\u0005c\u0000\u0000\u0143\u0144\u0005t\u0000\u0000\u0144\u0145"+ + "\u0001\u0000\u0000\u0000\u0145\u0146\u0006\u0000\u0000\u0000\u0146\f\u0001"+ + "\u0000\u0000\u0000\u0147\u0148\u0005d\u0000\u0000\u0148\u0149\u0005r\u0000"+ + "\u0000\u0149\u014a\u0005o\u0000\u0000\u014a\u014b\u0005p\u0000\u0000\u014b"+ + "\u014c\u0001\u0000\u0000\u0000\u014c\u014d\u0006\u0001\u0001\u0000\u014d"+ + "\u000e\u0001\u0000\u0000\u0000\u014e\u014f\u0005e\u0000\u0000\u014f\u0150"+ + "\u0005n\u0000\u0000\u0150\u0151\u0005r\u0000\u0000\u0151\u0152\u0005i"+ + "\u0000\u0000\u0152\u0153\u0005c\u0000\u0000\u0153\u0154\u0005h\u0000\u0000"+ + "\u0154\u0155\u0001\u0000\u0000\u0000\u0155\u0156\u0006\u0002\u0002\u0000"+ + "\u0156\u0010\u0001\u0000\u0000\u0000\u0157\u0158\u0005e\u0000\u0000\u0158"+ + "\u0159\u0005v\u0000\u0000\u0159\u015a\u0005a\u0000\u0000\u015a\u015b\u0005"+ + "l\u0000\u0000\u015b\u015c\u0001\u0000\u0000\u0000\u015c\u015d\u0006\u0003"+ + "\u0000\u0000\u015d\u0012\u0001\u0000\u0000\u0000\u015e\u015f\u0005e\u0000"+ + "\u0000\u015f\u0160\u0005x\u0000\u0000\u0160\u0161\u0005p\u0000\u0000\u0161"+ + 
"\u0162\u0005l\u0000\u0000\u0162\u0163\u0005a\u0000\u0000\u0163\u0164\u0005"+ + "i\u0000\u0000\u0164\u0165\u0005n\u0000\u0000\u0165\u0166\u0001\u0000\u0000"+ + "\u0000\u0166\u0167\u0006\u0004\u0003\u0000\u0167\u0014\u0001\u0000\u0000"+ + "\u0000\u0168\u0169\u0005f\u0000\u0000\u0169\u016a\u0005r\u0000\u0000\u016a"+ + "\u016b\u0005o\u0000\u0000\u016b\u016c\u0005m\u0000\u0000\u016c\u016d\u0001"+ + "\u0000\u0000\u0000\u016d\u016e\u0006\u0005\u0004\u0000\u016e\u0016\u0001"+ + "\u0000\u0000\u0000\u016f\u0170\u0005g\u0000\u0000\u0170\u0171\u0005r\u0000"+ + "\u0000\u0171\u0172\u0005o\u0000\u0000\u0172\u0173\u0005k\u0000\u0000\u0173"+ + "\u0174\u0001\u0000\u0000\u0000\u0174\u0175\u0006\u0006\u0000\u0000\u0175"+ + "\u0018\u0001\u0000\u0000\u0000\u0176\u0177\u0005i\u0000\u0000\u0177\u0178"+ + "\u0005n\u0000\u0000\u0178\u0179\u0005l\u0000\u0000\u0179\u017a\u0005i"+ + "\u0000\u0000\u017a\u017b\u0005n\u0000\u0000\u017b\u017c\u0005e\u0000\u0000"+ + "\u017c\u017d\u0005s\u0000\u0000\u017d\u017e\u0005t\u0000\u0000\u017e\u017f"+ + "\u0005a\u0000\u0000\u017f\u0180\u0005t\u0000\u0000\u0180\u0181\u0005s"+ + "\u0000\u0000\u0181\u0182\u0001\u0000\u0000\u0000\u0182\u0183\u0006\u0007"+ + "\u0000\u0000\u0183\u001a\u0001\u0000\u0000\u0000\u0184\u0185\u0005k\u0000"+ + "\u0000\u0185\u0186\u0005e\u0000\u0000\u0186\u0187\u0005e\u0000\u0000\u0187"+ + "\u0188\u0005p\u0000\u0000\u0188\u0189\u0001\u0000\u0000\u0000\u0189\u018a"+ + "\u0006\b\u0001\u0000\u018a\u001c\u0001\u0000\u0000\u0000\u018b\u018c\u0005"+ + "l\u0000\u0000\u018c\u018d\u0005i\u0000\u0000\u018d\u018e\u0005m\u0000"+ + "\u0000\u018e\u018f\u0005i\u0000\u0000\u018f\u0190\u0005t\u0000\u0000\u0190"+ + "\u0191\u0001\u0000\u0000\u0000\u0191\u0192\u0006\t\u0000\u0000\u0192\u001e"+ + "\u0001\u0000\u0000\u0000\u0193\u0194\u0005m\u0000\u0000\u0194\u0195\u0005"+ + "v\u0000\u0000\u0195\u0196\u0005_\u0000\u0000\u0196\u0197\u0005e\u0000"+ + "\u0000\u0197\u0198\u0005x\u0000\u0000\u0198\u0199\u0005p\u0000\u0000\u0199"+ + "\u019a\u0005a\u0000\u0000\u019a\u019b\u0005n\u0000\u0000\u019b\u019c\u0005"+ + "d\u0000\u0000\u019c\u019d\u0001\u0000\u0000\u0000\u019d\u019e\u0006\n"+ + "\u0005\u0000\u019e \u0001\u0000\u0000\u0000\u019f\u01a0\u0005p\u0000\u0000"+ + "\u01a0\u01a1\u0005r\u0000\u0000\u01a1\u01a2\u0005o\u0000\u0000\u01a2\u01a3"+ + "\u0005j\u0000\u0000\u01a3\u01a4\u0005e\u0000\u0000\u01a4\u01a5\u0005c"+ + "\u0000\u0000\u01a5\u01a6\u0005t\u0000\u0000\u01a6\u01a7\u0001\u0000\u0000"+ + "\u0000\u01a7\u01a8\u0006\u000b\u0001\u0000\u01a8\"\u0001\u0000\u0000\u0000"+ + "\u01a9\u01aa\u0005r\u0000\u0000\u01aa\u01ab\u0005e\u0000\u0000\u01ab\u01ac"+ + "\u0005n\u0000\u0000\u01ac\u01ad\u0005a\u0000\u0000\u01ad\u01ae\u0005m"+ + "\u0000\u0000\u01ae\u01af\u0005e\u0000\u0000\u01af\u01b0\u0001\u0000\u0000"+ + "\u0000\u01b0\u01b1\u0006\f\u0006\u0000\u01b1$\u0001\u0000\u0000\u0000"+ + "\u01b2\u01b3\u0005r\u0000\u0000\u01b3\u01b4\u0005o\u0000\u0000\u01b4\u01b5"+ + "\u0005w\u0000\u0000\u01b5\u01b6\u0001\u0000\u0000\u0000\u01b6\u01b7\u0006"+ + "\r\u0000\u0000\u01b7&\u0001\u0000\u0000\u0000\u01b8\u01b9\u0005s\u0000"+ + "\u0000\u01b9\u01ba\u0005h\u0000\u0000\u01ba\u01bb\u0005o\u0000\u0000\u01bb"+ + "\u01bc\u0005w\u0000\u0000\u01bc\u01bd\u0001\u0000\u0000\u0000\u01bd\u01be"+ + "\u0006\u000e\u0007\u0000\u01be(\u0001\u0000\u0000\u0000\u01bf\u01c0\u0005"+ + "s\u0000\u0000\u01c0\u01c1\u0005o\u0000\u0000\u01c1\u01c2\u0005r\u0000"+ + "\u0000\u01c2\u01c3\u0005t\u0000\u0000\u01c3\u01c4\u0001\u0000\u0000\u0000"+ + "\u01c4\u01c5\u0006\u000f\u0000\u0000\u01c5*\u0001\u0000\u0000\u0000\u01c6"+ + 
"\u01c7\u0005s\u0000\u0000\u01c7\u01c8\u0005t\u0000\u0000\u01c8\u01c9\u0005"+ + "a\u0000\u0000\u01c9\u01ca\u0005t\u0000\u0000\u01ca\u01cb\u0005s\u0000"+ + "\u0000\u01cb\u01cc\u0001\u0000\u0000\u0000\u01cc\u01cd\u0006\u0010\u0000"+ + "\u0000\u01cd,\u0001\u0000\u0000\u0000\u01ce\u01cf\u0005w\u0000\u0000\u01cf"+ + "\u01d0\u0005h\u0000\u0000\u01d0\u01d1\u0005e\u0000\u0000\u01d1\u01d2\u0005"+ + "r\u0000\u0000\u01d2\u01d3\u0005e\u0000\u0000\u01d3\u01d4\u0001\u0000\u0000"+ + "\u0000\u01d4\u01d5\u0006\u0011\u0000\u0000\u01d5.\u0001\u0000\u0000\u0000"+ + "\u01d6\u01d8\b\u0000\u0000\u0000\u01d7\u01d6\u0001\u0000\u0000\u0000\u01d8"+ + "\u01d9\u0001\u0000\u0000\u0000\u01d9\u01d7\u0001\u0000\u0000\u0000\u01d9"+ + "\u01da\u0001\u0000\u0000\u0000\u01da\u01db\u0001\u0000\u0000\u0000\u01db"+ + "\u01dc\u0006\u0012\u0000\u0000\u01dc0\u0001\u0000\u0000\u0000\u01dd\u01de"+ + "\u0005/\u0000\u0000\u01de\u01df\u0005/\u0000\u0000\u01df\u01e3\u0001\u0000"+ + "\u0000\u0000\u01e0\u01e2\b\u0001\u0000\u0000\u01e1\u01e0\u0001\u0000\u0000"+ + "\u0000\u01e2\u01e5\u0001\u0000\u0000\u0000\u01e3\u01e1\u0001\u0000\u0000"+ + "\u0000\u01e3\u01e4\u0001\u0000\u0000\u0000\u01e4\u01e7\u0001\u0000\u0000"+ + "\u0000\u01e5\u01e3\u0001\u0000\u0000\u0000\u01e6\u01e8\u0005\r\u0000\u0000"+ + "\u01e7\u01e6\u0001\u0000\u0000\u0000\u01e7\u01e8\u0001\u0000\u0000\u0000"+ + "\u01e8\u01ea\u0001\u0000\u0000\u0000\u01e9\u01eb\u0005\n\u0000\u0000\u01ea"+ + "\u01e9\u0001\u0000\u0000\u0000\u01ea\u01eb\u0001\u0000\u0000\u0000\u01eb"+ + "\u01ec\u0001\u0000\u0000\u0000\u01ec\u01ed\u0006\u0013\b\u0000\u01ed2"+ + "\u0001\u0000\u0000\u0000\u01ee\u01ef\u0005/\u0000\u0000\u01ef\u01f0\u0005"+ + "*\u0000\u0000\u01f0\u01f5\u0001\u0000\u0000\u0000\u01f1\u01f4\u00033\u0014"+ + "\u0000\u01f2\u01f4\t\u0000\u0000\u0000\u01f3\u01f1\u0001\u0000\u0000\u0000"+ + "\u01f3\u01f2\u0001\u0000\u0000\u0000\u01f4\u01f7\u0001\u0000\u0000\u0000"+ + "\u01f5\u01f6\u0001\u0000\u0000\u0000\u01f5\u01f3\u0001\u0000\u0000\u0000"+ + "\u01f6\u01f8\u0001\u0000\u0000\u0000\u01f7\u01f5\u0001\u0000\u0000\u0000"+ + "\u01f8\u01f9\u0005*\u0000\u0000\u01f9\u01fa\u0005/\u0000\u0000\u01fa\u01fb"+ + "\u0001\u0000\u0000\u0000\u01fb\u01fc\u0006\u0014\b\u0000\u01fc4\u0001"+ + "\u0000\u0000\u0000\u01fd\u01ff\u0007\u0002\u0000\u0000\u01fe\u01fd\u0001"+ + "\u0000\u0000\u0000\u01ff\u0200\u0001\u0000\u0000\u0000\u0200\u01fe\u0001"+ + "\u0000\u0000\u0000\u0200\u0201\u0001\u0000\u0000\u0000\u0201\u0202\u0001"+ + "\u0000\u0000\u0000\u0202\u0203\u0006\u0015\b\u0000\u02036\u0001\u0000"+ + "\u0000\u0000\u0204\u0205\u0003\u009fJ\u0000\u0205\u0206\u0001\u0000\u0000"+ + "\u0000\u0206\u0207\u0006\u0016\t\u0000\u0207\u0208\u0006\u0016\n\u0000"+ + "\u02088\u0001\u0000\u0000\u0000\u0209\u020a\u0003A\u001b\u0000\u020a\u020b"+ + "\u0001\u0000\u0000\u0000\u020b\u020c\u0006\u0017\u000b\u0000\u020c\u020d"+ + "\u0006\u0017\f\u0000\u020d:\u0001\u0000\u0000\u0000\u020e\u020f\u0003"+ + "5\u0015\u0000\u020f\u0210\u0001\u0000\u0000\u0000\u0210\u0211\u0006\u0018"+ + "\b\u0000\u0211<\u0001\u0000\u0000\u0000\u0212\u0213\u00031\u0013\u0000"+ + "\u0213\u0214\u0001\u0000\u0000\u0000\u0214\u0215\u0006\u0019\b\u0000\u0215"+ + ">\u0001\u0000\u0000\u0000\u0216\u0217\u00033\u0014\u0000\u0217\u0218\u0001"+ + "\u0000\u0000\u0000\u0218\u0219\u0006\u001a\b\u0000\u0219@\u0001\u0000"+ + "\u0000\u0000\u021a\u021b\u0005|\u0000\u0000\u021b\u021c\u0001\u0000\u0000"+ + "\u0000\u021c\u021d\u0006\u001b\f\u0000\u021dB\u0001\u0000\u0000\u0000"+ + "\u021e\u021f\u0007\u0003\u0000\u0000\u021fD\u0001\u0000\u0000\u0000\u0220"+ + 
"\u0221\u0007\u0004\u0000\u0000\u0221F\u0001\u0000\u0000\u0000\u0222\u0223"+ + "\u0005\\\u0000\u0000\u0223\u0224\u0007\u0005\u0000\u0000\u0224H\u0001"+ + "\u0000\u0000\u0000\u0225\u0226\b\u0006\u0000\u0000\u0226J\u0001\u0000"+ + "\u0000\u0000\u0227\u0229\u0007\u0007\u0000\u0000\u0228\u022a\u0007\b\u0000"+ + "\u0000\u0229\u0228\u0001\u0000\u0000\u0000\u0229\u022a\u0001\u0000\u0000"+ + "\u0000\u022a\u022c\u0001\u0000\u0000\u0000\u022b\u022d\u0003C\u001c\u0000"+ + "\u022c\u022b\u0001\u0000\u0000\u0000\u022d\u022e\u0001\u0000\u0000\u0000"+ + "\u022e\u022c\u0001\u0000\u0000\u0000\u022e\u022f\u0001\u0000\u0000\u0000"+ + "\u022fL\u0001\u0000\u0000\u0000\u0230\u0231\u0005@\u0000\u0000\u0231N"+ + "\u0001\u0000\u0000\u0000\u0232\u0233\u0005`\u0000\u0000\u0233P\u0001\u0000"+ + "\u0000\u0000\u0234\u0238\b\t\u0000\u0000\u0235\u0236\u0005`\u0000\u0000"+ + "\u0236\u0238\u0005`\u0000\u0000\u0237\u0234\u0001\u0000\u0000\u0000\u0237"+ + "\u0235\u0001\u0000\u0000\u0000\u0238R\u0001\u0000\u0000\u0000\u0239\u023a"+ + "\u0005_\u0000\u0000\u023aT\u0001\u0000\u0000\u0000\u023b\u023f\u0003E"+ + "\u001d\u0000\u023c\u023f\u0003C\u001c\u0000\u023d\u023f\u0003S$\u0000"+ + "\u023e\u023b\u0001\u0000\u0000\u0000\u023e\u023c\u0001\u0000\u0000\u0000"+ + "\u023e\u023d\u0001\u0000\u0000\u0000\u023fV\u0001\u0000\u0000\u0000\u0240"+ + "\u0245\u0005\"\u0000\u0000\u0241\u0244\u0003G\u001e\u0000\u0242\u0244"+ + "\u0003I\u001f\u0000\u0243\u0241\u0001\u0000\u0000\u0000\u0243\u0242\u0001"+ + "\u0000\u0000\u0000\u0244\u0247\u0001\u0000\u0000\u0000\u0245\u0243\u0001"+ + "\u0000\u0000\u0000\u0245\u0246\u0001\u0000\u0000\u0000\u0246\u0248\u0001"+ + "\u0000\u0000\u0000\u0247\u0245\u0001\u0000\u0000\u0000\u0248\u025e\u0005"+ + "\"\u0000\u0000\u0249\u024a\u0005\"\u0000\u0000\u024a\u024b\u0005\"\u0000"+ + "\u0000\u024b\u024c\u0005\"\u0000\u0000\u024c\u0250\u0001\u0000\u0000\u0000"+ + "\u024d\u024f\b\u0001\u0000\u0000\u024e\u024d\u0001\u0000\u0000\u0000\u024f"+ + "\u0252\u0001\u0000\u0000\u0000\u0250\u0251\u0001\u0000\u0000\u0000\u0250"+ + "\u024e\u0001\u0000\u0000\u0000\u0251\u0253\u0001\u0000\u0000\u0000\u0252"+ + "\u0250\u0001\u0000\u0000\u0000\u0253\u0254\u0005\"\u0000\u0000\u0254\u0255"+ + "\u0005\"\u0000\u0000\u0255\u0256\u0005\"\u0000\u0000\u0256\u0258\u0001"+ + "\u0000\u0000\u0000\u0257\u0259\u0005\"\u0000\u0000\u0258\u0257\u0001\u0000"+ + "\u0000\u0000\u0258\u0259\u0001\u0000\u0000\u0000\u0259\u025b\u0001\u0000"+ + "\u0000\u0000\u025a\u025c\u0005\"\u0000\u0000\u025b\u025a\u0001\u0000\u0000"+ + "\u0000\u025b\u025c\u0001\u0000\u0000\u0000\u025c\u025e\u0001\u0000\u0000"+ + "\u0000\u025d\u0240\u0001\u0000\u0000\u0000\u025d\u0249\u0001\u0000\u0000"+ + "\u0000\u025eX\u0001\u0000\u0000\u0000\u025f\u0261\u0003C\u001c\u0000\u0260"+ + "\u025f\u0001\u0000\u0000\u0000\u0261\u0262\u0001\u0000\u0000\u0000\u0262"+ + "\u0260\u0001\u0000\u0000\u0000\u0262\u0263\u0001\u0000\u0000\u0000\u0263"+ + "Z\u0001\u0000\u0000\u0000\u0264\u0266\u0003C\u001c\u0000\u0265\u0264\u0001"+ + "\u0000\u0000\u0000\u0266\u0267\u0001\u0000\u0000\u0000\u0267\u0265\u0001"+ + "\u0000\u0000\u0000\u0267\u0268\u0001\u0000\u0000\u0000\u0268\u0269\u0001"+ + "\u0000\u0000\u0000\u0269\u026d\u0003i/\u0000\u026a\u026c\u0003C\u001c"+ + "\u0000\u026b\u026a\u0001\u0000\u0000\u0000\u026c\u026f\u0001\u0000\u0000"+ + "\u0000\u026d\u026b\u0001\u0000\u0000\u0000\u026d\u026e\u0001\u0000\u0000"+ + "\u0000\u026e\u028f\u0001\u0000\u0000\u0000\u026f\u026d\u0001\u0000\u0000"+ + "\u0000\u0270\u0272\u0003i/\u0000\u0271\u0273\u0003C\u001c\u0000\u0272"+ + 
"\u0271\u0001\u0000\u0000\u0000\u0273\u0274\u0001\u0000\u0000\u0000\u0274"+ + "\u0272\u0001\u0000\u0000\u0000\u0274\u0275\u0001\u0000\u0000\u0000\u0275"+ + "\u028f\u0001\u0000\u0000\u0000\u0276\u0278\u0003C\u001c\u0000\u0277\u0276"+ + "\u0001\u0000\u0000\u0000\u0278\u0279\u0001\u0000\u0000\u0000\u0279\u0277"+ + "\u0001\u0000\u0000\u0000\u0279\u027a\u0001\u0000\u0000\u0000\u027a\u0282"+ + "\u0001\u0000\u0000\u0000\u027b\u027f\u0003i/\u0000\u027c\u027e\u0003C"+ + "\u001c\u0000\u027d\u027c\u0001\u0000\u0000\u0000\u027e\u0281\u0001\u0000"+ + "\u0000\u0000\u027f\u027d\u0001\u0000\u0000\u0000\u027f\u0280\u0001\u0000"+ + "\u0000\u0000\u0280\u0283\u0001\u0000\u0000\u0000\u0281\u027f\u0001\u0000"+ + "\u0000\u0000\u0282\u027b\u0001\u0000\u0000\u0000\u0282\u0283\u0001\u0000"+ + "\u0000\u0000\u0283\u0284\u0001\u0000\u0000\u0000\u0284\u0285\u0003K \u0000"+ + "\u0285\u028f\u0001\u0000\u0000\u0000\u0286\u0288\u0003i/\u0000\u0287\u0289"+ + "\u0003C\u001c\u0000\u0288\u0287\u0001\u0000\u0000\u0000\u0289\u028a\u0001"+ + "\u0000\u0000\u0000\u028a\u0288\u0001\u0000\u0000\u0000\u028a\u028b\u0001"+ + "\u0000\u0000\u0000\u028b\u028c\u0001\u0000\u0000\u0000\u028c\u028d\u0003"+ + "K \u0000\u028d\u028f\u0001\u0000\u0000\u0000\u028e\u0265\u0001\u0000\u0000"+ + "\u0000\u028e\u0270\u0001\u0000\u0000\u0000\u028e\u0277\u0001\u0000\u0000"+ + "\u0000\u028e\u0286\u0001\u0000\u0000\u0000\u028f\\\u0001\u0000\u0000\u0000"+ + "\u0290\u0291\u0005b\u0000\u0000\u0291\u0292\u0005y\u0000\u0000\u0292^"+ + "\u0001\u0000\u0000\u0000\u0293\u0294\u0005a\u0000\u0000\u0294\u0295\u0005"+ + "n\u0000\u0000\u0295\u0296\u0005d\u0000\u0000\u0296`\u0001\u0000\u0000"+ + "\u0000\u0297\u0298\u0005a\u0000\u0000\u0298\u0299\u0005s\u0000\u0000\u0299"+ + "\u029a\u0005c\u0000\u0000\u029ab\u0001\u0000\u0000\u0000\u029b\u029c\u0005"+ + "=\u0000\u0000\u029cd\u0001\u0000\u0000\u0000\u029d\u029e\u0005,\u0000"+ + "\u0000\u029ef\u0001\u0000\u0000\u0000\u029f\u02a0\u0005d\u0000\u0000\u02a0"+ + "\u02a1\u0005e\u0000\u0000\u02a1\u02a2\u0005s\u0000\u0000\u02a2\u02a3\u0005"+ + "c\u0000\u0000\u02a3h\u0001\u0000\u0000\u0000\u02a4\u02a5\u0005.\u0000"+ + "\u0000\u02a5j\u0001\u0000\u0000\u0000\u02a6\u02a7\u0005f\u0000\u0000\u02a7"+ + "\u02a8\u0005a\u0000\u0000\u02a8\u02a9\u0005l\u0000\u0000\u02a9\u02aa\u0005"+ + "s\u0000\u0000\u02aa\u02ab\u0005e\u0000\u0000\u02abl\u0001\u0000\u0000"+ + "\u0000\u02ac\u02ad\u0005f\u0000\u0000\u02ad\u02ae\u0005i\u0000\u0000\u02ae"+ + "\u02af\u0005r\u0000\u0000\u02af\u02b0\u0005s\u0000\u0000\u02b0\u02b1\u0005"+ + "t\u0000\u0000\u02b1n\u0001\u0000\u0000\u0000\u02b2\u02b3\u0005l\u0000"+ + "\u0000\u02b3\u02b4\u0005a\u0000\u0000\u02b4\u02b5\u0005s\u0000\u0000\u02b5"+ + "\u02b6\u0005t\u0000\u0000\u02b6p\u0001\u0000\u0000\u0000\u02b7\u02b8\u0005"+ + "(\u0000\u0000\u02b8r\u0001\u0000\u0000\u0000\u02b9\u02ba\u0005i\u0000"+ + "\u0000\u02ba\u02bb\u0005n\u0000\u0000\u02bbt\u0001\u0000\u0000\u0000\u02bc"+ + "\u02bd\u0005i\u0000\u0000\u02bd\u02be\u0005s\u0000\u0000\u02bev\u0001"+ + "\u0000\u0000\u0000\u02bf\u02c0\u0005l\u0000\u0000\u02c0\u02c1\u0005i\u0000"+ + "\u0000\u02c1\u02c2\u0005k\u0000\u0000\u02c2\u02c3\u0005e\u0000\u0000\u02c3"+ + "x\u0001\u0000\u0000\u0000\u02c4\u02c5\u0005n\u0000\u0000\u02c5\u02c6\u0005"+ + "o\u0000\u0000\u02c6\u02c7\u0005t\u0000\u0000\u02c7z\u0001\u0000\u0000"+ + "\u0000\u02c8\u02c9\u0005n\u0000\u0000\u02c9\u02ca\u0005u\u0000\u0000\u02ca"+ + "\u02cb\u0005l\u0000\u0000\u02cb\u02cc\u0005l\u0000\u0000\u02cc|\u0001"+ + "\u0000\u0000\u0000\u02cd\u02ce\u0005n\u0000\u0000\u02ce\u02cf\u0005u\u0000"+ + 
"\u0000\u02cf\u02d0\u0005l\u0000\u0000\u02d0\u02d1\u0005l\u0000\u0000\u02d1"+ + "\u02d2\u0005s\u0000\u0000\u02d2~\u0001\u0000\u0000\u0000\u02d3\u02d4\u0005"+ + "o\u0000\u0000\u02d4\u02d5\u0005r\u0000\u0000\u02d5\u0080\u0001\u0000\u0000"+ + "\u0000\u02d6\u02d7\u0005?\u0000\u0000\u02d7\u0082\u0001\u0000\u0000\u0000"+ + "\u02d8\u02d9\u0005r\u0000\u0000\u02d9\u02da\u0005l\u0000\u0000\u02da\u02db"+ + "\u0005i\u0000\u0000\u02db\u02dc\u0005k\u0000\u0000\u02dc\u02dd\u0005e"+ + "\u0000\u0000\u02dd\u0084\u0001\u0000\u0000\u0000\u02de\u02df\u0005)\u0000"+ + "\u0000\u02df\u0086\u0001\u0000\u0000\u0000\u02e0\u02e1\u0005t\u0000\u0000"+ + "\u02e1\u02e2\u0005r\u0000\u0000\u02e2\u02e3\u0005u\u0000\u0000\u02e3\u02e4"+ + "\u0005e\u0000\u0000\u02e4\u0088\u0001\u0000\u0000\u0000\u02e5\u02e6\u0005"+ + "=\u0000\u0000\u02e6\u02e7\u0005=\u0000\u0000\u02e7\u008a\u0001\u0000\u0000"+ + "\u0000\u02e8\u02e9\u0005!\u0000\u0000\u02e9\u02ea\u0005=\u0000\u0000\u02ea"+ + "\u008c\u0001\u0000\u0000\u0000\u02eb\u02ec\u0005<\u0000\u0000\u02ec\u008e"+ + "\u0001\u0000\u0000\u0000\u02ed\u02ee\u0005<\u0000\u0000\u02ee\u02ef\u0005"+ + "=\u0000\u0000\u02ef\u0090\u0001\u0000\u0000\u0000\u02f0\u02f1\u0005>\u0000"+ + "\u0000\u02f1\u0092\u0001\u0000\u0000\u0000\u02f2\u02f3\u0005>\u0000\u0000"+ + "\u02f3\u02f4\u0005=\u0000\u0000\u02f4\u0094\u0001\u0000\u0000\u0000\u02f5"+ + "\u02f6\u0005+\u0000\u0000\u02f6\u0096\u0001\u0000\u0000\u0000\u02f7\u02f8"+ + "\u0005-\u0000\u0000\u02f8\u0098\u0001\u0000\u0000\u0000\u02f9\u02fa\u0005"+ + "*\u0000\u0000\u02fa\u009a\u0001\u0000\u0000\u0000\u02fb\u02fc\u0005/\u0000"+ + "\u0000\u02fc\u009c\u0001\u0000\u0000\u0000\u02fd\u02fe\u0005%\u0000\u0000"+ + "\u02fe\u009e\u0001\u0000\u0000\u0000\u02ff\u0300\u0005[\u0000\u0000\u0300"+ + "\u0301\u0001\u0000\u0000\u0000\u0301\u0302\u0006J\u0000\u0000\u0302\u0303"+ + "\u0006J\u0000\u0000\u0303\u00a0\u0001\u0000\u0000\u0000\u0304\u0305\u0005"+ + "]\u0000\u0000\u0305\u0306\u0001\u0000\u0000\u0000\u0306\u0307\u0006K\f"+ + "\u0000\u0307\u0308\u0006K\f\u0000\u0308\u00a2\u0001\u0000\u0000\u0000"+ + "\u0309\u030d\u0003E\u001d\u0000\u030a\u030c\u0003U%\u0000\u030b\u030a"+ + "\u0001\u0000\u0000\u0000\u030c\u030f\u0001\u0000\u0000\u0000\u030d\u030b"+ + "\u0001\u0000\u0000\u0000\u030d\u030e\u0001\u0000\u0000\u0000\u030e\u031a"+ + "\u0001\u0000\u0000\u0000\u030f\u030d\u0001\u0000\u0000\u0000\u0310\u0313"+ + "\u0003S$\u0000\u0311\u0313\u0003M!\u0000\u0312\u0310\u0001\u0000\u0000"+ + "\u0000\u0312\u0311\u0001\u0000\u0000\u0000\u0313\u0315\u0001\u0000\u0000"+ + "\u0000\u0314\u0316\u0003U%\u0000\u0315\u0314\u0001\u0000\u0000\u0000\u0316"+ + "\u0317\u0001\u0000\u0000\u0000\u0317\u0315\u0001\u0000\u0000\u0000\u0317"+ + "\u0318\u0001\u0000\u0000\u0000\u0318\u031a\u0001\u0000\u0000\u0000\u0319"+ + "\u0309\u0001\u0000\u0000\u0000\u0319\u0312\u0001\u0000\u0000\u0000\u031a"+ + "\u00a4\u0001\u0000\u0000\u0000\u031b\u031d\u0003O\"\u0000\u031c\u031e"+ + "\u0003Q#\u0000\u031d\u031c\u0001\u0000\u0000\u0000\u031e\u031f\u0001\u0000"+ + "\u0000\u0000\u031f\u031d\u0001\u0000\u0000\u0000\u031f\u0320\u0001\u0000"+ + "\u0000\u0000\u0320\u0321\u0001\u0000\u0000\u0000\u0321\u0322\u0003O\""+ + "\u0000\u0322\u00a6\u0001\u0000\u0000\u0000\u0323\u0324\u00031\u0013\u0000"+ + "\u0324\u0325\u0001\u0000\u0000\u0000\u0325\u0326\u0006N\b\u0000\u0326"+ + "\u00a8\u0001\u0000\u0000\u0000\u0327\u0328\u00033\u0014\u0000\u0328\u0329"+ + "\u0001\u0000\u0000\u0000\u0329\u032a\u0006O\b\u0000\u032a\u00aa\u0001"+ + "\u0000\u0000\u0000\u032b\u032c\u00035\u0015\u0000\u032c\u032d\u0001\u0000"+ + 
"\u0000\u0000\u032d\u032e\u0006P\b\u0000\u032e\u00ac\u0001\u0000\u0000"+ + "\u0000\u032f\u0330\u0003A\u001b\u0000\u0330\u0331\u0001\u0000\u0000\u0000"+ + "\u0331\u0332\u0006Q\u000b\u0000\u0332\u0333\u0006Q\f\u0000\u0333\u00ae"+ + "\u0001\u0000\u0000\u0000\u0334\u0335\u0003\u009fJ\u0000\u0335\u0336\u0001"+ + "\u0000\u0000\u0000\u0336\u0337\u0006R\t\u0000\u0337\u00b0\u0001\u0000"+ + "\u0000\u0000\u0338\u0339\u0003\u00a1K\u0000\u0339\u033a\u0001\u0000\u0000"+ + "\u0000\u033a\u033b\u0006S\r\u0000\u033b\u00b2\u0001\u0000\u0000\u0000"+ + "\u033c\u033d\u0003e-\u0000\u033d\u033e\u0001\u0000\u0000\u0000\u033e\u033f"+ + "\u0006T\u000e\u0000\u033f\u00b4\u0001\u0000\u0000\u0000\u0340\u0341\u0003"+ + "c,\u0000\u0341\u0342\u0001\u0000\u0000\u0000\u0342\u0343\u0006U\u000f"+ + "\u0000\u0343\u00b6\u0001\u0000\u0000\u0000\u0344\u0345\u0005m\u0000\u0000"+ + "\u0345\u0346\u0005e\u0000\u0000\u0346\u0347\u0005t\u0000\u0000\u0347\u0348"+ + "\u0005a\u0000\u0000\u0348\u0349\u0005d\u0000\u0000\u0349\u034a\u0005a"+ + "\u0000\u0000\u034a\u034b\u0005t\u0000\u0000\u034b\u034c\u0005a\u0000\u0000"+ + "\u034c\u00b8\u0001\u0000\u0000\u0000\u034d\u0351\b\n\u0000\u0000\u034e"+ + "\u034f\u0005/\u0000\u0000\u034f\u0351\b\u000b\u0000\u0000\u0350\u034d"+ + "\u0001\u0000\u0000\u0000\u0350\u034e\u0001\u0000\u0000\u0000\u0351\u00ba"+ + "\u0001\u0000\u0000\u0000\u0352\u0354\u0003\u00b9W\u0000\u0353\u0352\u0001"+ + "\u0000\u0000\u0000\u0354\u0355\u0001\u0000\u0000\u0000\u0355\u0353\u0001"+ + "\u0000\u0000\u0000\u0355\u0356\u0001\u0000\u0000\u0000\u0356\u00bc\u0001"+ + "\u0000\u0000\u0000\u0357\u0358\u0003\u00a5M\u0000\u0358\u0359\u0001\u0000"+ + "\u0000\u0000\u0359\u035a\u0006Y\u0010\u0000\u035a\u00be\u0001\u0000\u0000"+ + "\u0000\u035b\u035c\u00031\u0013\u0000\u035c\u035d\u0001\u0000\u0000\u0000"+ + "\u035d\u035e\u0006Z\b\u0000\u035e\u00c0\u0001\u0000\u0000\u0000\u035f"+ + "\u0360\u00033\u0014\u0000\u0360\u0361\u0001\u0000\u0000\u0000\u0361\u0362"+ + "\u0006[\b\u0000\u0362\u00c2\u0001\u0000\u0000\u0000\u0363\u0364\u0003"+ + "5\u0015\u0000\u0364\u0365\u0001\u0000\u0000\u0000\u0365\u0366\u0006\\"+ + "\b\u0000\u0366\u00c4\u0001\u0000\u0000\u0000\u0367\u0368\u0003A\u001b"+ + "\u0000\u0368\u0369\u0001\u0000\u0000\u0000\u0369\u036a\u0006]\u000b\u0000"+ + "\u036a\u036b\u0006]\f\u0000\u036b\u00c6\u0001\u0000\u0000\u0000\u036c"+ + "\u036d\u0003i/\u0000\u036d\u036e\u0001\u0000\u0000\u0000\u036e\u036f\u0006"+ + "^\u0011\u0000\u036f\u00c8\u0001\u0000\u0000\u0000\u0370\u0371\u0003e-"+ + "\u0000\u0371\u0372\u0001\u0000\u0000\u0000\u0372\u0373\u0006_\u000e\u0000"+ + "\u0373\u00ca\u0001\u0000\u0000\u0000\u0374\u0379\u0003E\u001d\u0000\u0375"+ + "\u0379\u0003C\u001c\u0000\u0376\u0379\u0003S$\u0000\u0377\u0379\u0003"+ + "\u0099G\u0000\u0378\u0374\u0001\u0000\u0000\u0000\u0378\u0375\u0001\u0000"+ + "\u0000\u0000\u0378\u0376\u0001\u0000\u0000\u0000\u0378\u0377\u0001\u0000"+ + "\u0000\u0000\u0379\u00cc\u0001\u0000\u0000\u0000\u037a\u037d\u0003E\u001d"+ + "\u0000\u037b\u037d\u0003\u0099G\u0000\u037c\u037a\u0001\u0000\u0000\u0000"+ + "\u037c\u037b\u0001\u0000\u0000\u0000\u037d\u0381\u0001\u0000\u0000\u0000"+ + "\u037e\u0380\u0003\u00cb`\u0000\u037f\u037e\u0001\u0000\u0000\u0000\u0380"+ + "\u0383\u0001\u0000\u0000\u0000\u0381\u037f\u0001\u0000\u0000\u0000\u0381"+ + "\u0382\u0001\u0000\u0000\u0000\u0382\u038e\u0001\u0000\u0000\u0000\u0383"+ + "\u0381\u0001\u0000\u0000\u0000\u0384\u0387\u0003S$\u0000\u0385\u0387\u0003"+ + "M!\u0000\u0386\u0384\u0001\u0000\u0000\u0000\u0386\u0385\u0001\u0000\u0000"+ + 
"\u0000\u0387\u0389\u0001\u0000\u0000\u0000\u0388\u038a\u0003\u00cb`\u0000"+ + "\u0389\u0388\u0001\u0000\u0000\u0000\u038a\u038b\u0001\u0000\u0000\u0000"+ + "\u038b\u0389\u0001\u0000\u0000\u0000\u038b\u038c\u0001\u0000\u0000\u0000"+ + "\u038c\u038e\u0001\u0000\u0000\u0000\u038d\u037c\u0001\u0000\u0000\u0000"+ + "\u038d\u0386\u0001\u0000\u0000\u0000\u038e\u00ce\u0001\u0000\u0000\u0000"+ + "\u038f\u0390\u0003\u00cda\u0000\u0390\u0391\u0001\u0000\u0000\u0000\u0391"+ + "\u0392\u0006b\u0012\u0000\u0392\u00d0\u0001\u0000\u0000\u0000\u0393\u0394"+ + "\u0003\u00a5M\u0000\u0394\u0395\u0001\u0000\u0000\u0000\u0395\u0396\u0006"+ + "c\u0010\u0000\u0396\u00d2\u0001\u0000\u0000\u0000\u0397\u0398\u00031\u0013"+ + "\u0000\u0398\u0399\u0001\u0000\u0000\u0000\u0399\u039a\u0006d\b\u0000"+ + "\u039a\u00d4\u0001\u0000\u0000\u0000\u039b\u039c\u00033\u0014\u0000\u039c"+ + "\u039d\u0001\u0000\u0000\u0000\u039d\u039e\u0006e\b\u0000\u039e\u00d6"+ + "\u0001\u0000\u0000\u0000\u039f\u03a0\u00035\u0015\u0000\u03a0\u03a1\u0001"+ + "\u0000\u0000\u0000\u03a1\u03a2\u0006f\b\u0000\u03a2\u00d8\u0001\u0000"+ + "\u0000\u0000\u03a3\u03a4\u0003A\u001b\u0000\u03a4\u03a5\u0001\u0000\u0000"+ + "\u0000\u03a5\u03a6\u0006g\u000b\u0000\u03a6\u03a7\u0006g\f\u0000\u03a7"+ + "\u00da\u0001\u0000\u0000\u0000\u03a8\u03a9\u0003c,\u0000\u03a9\u03aa\u0001"+ + "\u0000\u0000\u0000\u03aa\u03ab\u0006h\u000f\u0000\u03ab\u00dc\u0001\u0000"+ + "\u0000\u0000\u03ac\u03ad\u0003e-\u0000\u03ad\u03ae\u0001\u0000\u0000\u0000"+ + "\u03ae\u03af\u0006i\u000e\u0000\u03af\u00de\u0001\u0000\u0000\u0000\u03b0"+ + "\u03b1\u0003i/\u0000\u03b1\u03b2\u0001\u0000\u0000\u0000\u03b2\u03b3\u0006"+ + "j\u0011\u0000\u03b3\u00e0\u0001\u0000\u0000\u0000\u03b4\u03b5\u0005a\u0000"+ + "\u0000\u03b5\u03b6\u0005s\u0000\u0000\u03b6\u00e2\u0001\u0000\u0000\u0000"+ + "\u03b7\u03b8\u0003\u00a5M\u0000\u03b8\u03b9\u0001\u0000\u0000\u0000\u03b9"+ + "\u03ba\u0006l\u0010\u0000\u03ba\u00e4\u0001\u0000\u0000\u0000\u03bb\u03bc"+ + "\u0003\u00cda\u0000\u03bc\u03bd\u0001\u0000\u0000\u0000\u03bd\u03be\u0006"+ + "m\u0012\u0000\u03be\u00e6\u0001\u0000\u0000\u0000\u03bf\u03c0\u00031\u0013"+ + "\u0000\u03c0\u03c1\u0001\u0000\u0000\u0000\u03c1\u03c2\u0006n\b\u0000"+ + "\u03c2\u00e8\u0001\u0000\u0000\u0000\u03c3\u03c4\u00033\u0014\u0000\u03c4"+ + "\u03c5\u0001\u0000\u0000\u0000\u03c5\u03c6\u0006o\b\u0000\u03c6\u00ea"+ + "\u0001\u0000\u0000\u0000\u03c7\u03c8\u00035\u0015\u0000\u03c8\u03c9\u0001"+ + "\u0000\u0000\u0000\u03c9\u03ca\u0006p\b\u0000\u03ca\u00ec\u0001\u0000"+ + "\u0000\u0000\u03cb\u03cc\u0003A\u001b\u0000\u03cc\u03cd\u0001\u0000\u0000"+ + "\u0000\u03cd\u03ce\u0006q\u000b\u0000\u03ce\u03cf\u0006q\f\u0000\u03cf"+ + "\u00ee\u0001\u0000\u0000\u0000\u03d0\u03d1\u0003\u009fJ\u0000\u03d1\u03d2"+ + "\u0001\u0000\u0000\u0000\u03d2\u03d3\u0006r\t\u0000\u03d3\u03d4\u0006"+ + "r\u0013\u0000\u03d4\u00f0\u0001\u0000\u0000\u0000\u03d5\u03d6\u0005o\u0000"+ + "\u0000\u03d6\u03d7\u0005n\u0000\u0000\u03d7\u03d8\u0001\u0000\u0000\u0000"+ + "\u03d8\u03d9\u0006s\u0014\u0000\u03d9\u00f2\u0001\u0000\u0000\u0000\u03da"+ + "\u03db\u0005w\u0000\u0000\u03db\u03dc\u0005i\u0000\u0000\u03dc\u03dd\u0005"+ + "t\u0000\u0000\u03dd\u03de\u0005h\u0000\u0000\u03de\u03df\u0001\u0000\u0000"+ + "\u0000\u03df\u03e0\u0006t\u0014\u0000\u03e0\u00f4\u0001\u0000\u0000\u0000"+ + "\u03e1\u03e2\b\f\u0000\u0000\u03e2\u00f6\u0001\u0000\u0000\u0000\u03e3"+ + "\u03e6\u0003E\u001d\u0000\u03e4\u03e6\u0003C\u001c\u0000\u03e5\u03e3\u0001"+ + "\u0000\u0000\u0000\u03e5\u03e4\u0001\u0000\u0000\u0000\u03e6\u03ea\u0001"+ + 
"\u0000\u0000\u0000\u03e7\u03e9\u0003\u00f5u\u0000\u03e8\u03e7\u0001\u0000"+ + "\u0000\u0000\u03e9\u03ec\u0001\u0000\u0000\u0000\u03ea\u03e8\u0001\u0000"+ + "\u0000\u0000\u03ea\u03eb\u0001\u0000\u0000\u0000\u03eb\u00f8\u0001\u0000"+ + "\u0000\u0000\u03ec\u03ea\u0001\u0000\u0000\u0000\u03ed\u03ee\u0003\u00a5"+ + "M\u0000\u03ee\u03ef\u0001\u0000\u0000\u0000\u03ef\u03f0\u0006w\u0010\u0000"+ + "\u03f0\u00fa\u0001\u0000\u0000\u0000\u03f1\u03f2\u0003\u00f7v\u0000\u03f2"+ + "\u03f3\u0001\u0000\u0000\u0000\u03f3\u03f4\u0006x\u0015\u0000\u03f4\u00fc"+ + "\u0001\u0000\u0000\u0000\u03f5\u03f6\u00031\u0013\u0000\u03f6\u03f7\u0001"+ + "\u0000\u0000\u0000\u03f7\u03f8\u0006y\b\u0000\u03f8\u00fe\u0001\u0000"+ + "\u0000\u0000\u03f9\u03fa\u00033\u0014\u0000\u03fa\u03fb\u0001\u0000\u0000"+ + "\u0000\u03fb\u03fc\u0006z\b\u0000\u03fc\u0100\u0001\u0000\u0000\u0000"+ + "\u03fd\u03fe\u00035\u0015\u0000\u03fe\u03ff\u0001\u0000\u0000\u0000\u03ff"+ + "\u0400\u0006{\b\u0000\u0400\u0102\u0001\u0000\u0000\u0000\u0401\u0402"+ + "\u0003A\u001b\u0000\u0402\u0403\u0001\u0000\u0000\u0000\u0403\u0404\u0006"+ + "|\u000b\u0000\u0404\u0405\u0006|\f\u0000\u0405\u0406\u0006|\f\u0000\u0406"+ + "\u0104\u0001\u0000\u0000\u0000\u0407\u0408\u0003c,\u0000\u0408\u0409\u0001"+ + "\u0000\u0000\u0000\u0409\u040a\u0006}\u000f\u0000\u040a\u0106\u0001\u0000"+ + "\u0000\u0000\u040b\u040c\u0003e-\u0000\u040c\u040d\u0001\u0000\u0000\u0000"+ + "\u040d\u040e\u0006~\u000e\u0000\u040e\u0108\u0001\u0000\u0000\u0000\u040f"+ + "\u0410\u0003i/\u0000\u0410\u0411\u0001\u0000\u0000\u0000\u0411\u0412\u0006"+ + "\u007f\u0011\u0000\u0412\u010a\u0001\u0000\u0000\u0000\u0413\u0414\u0003"+ + "\u00f3t\u0000\u0414\u0415\u0001\u0000\u0000\u0000\u0415\u0416\u0006\u0080"+ + "\u0016\u0000\u0416\u010c\u0001\u0000\u0000\u0000\u0417\u0418\u0003\u00cd"+ + "a\u0000\u0418\u0419\u0001\u0000\u0000\u0000\u0419\u041a\u0006\u0081\u0012"+ + "\u0000\u041a\u010e\u0001\u0000\u0000\u0000\u041b\u041c\u0003\u00a5M\u0000"+ + "\u041c\u041d\u0001\u0000\u0000\u0000\u041d\u041e\u0006\u0082\u0010\u0000"+ + "\u041e\u0110\u0001\u0000\u0000\u0000\u041f\u0420\u00031\u0013\u0000\u0420"+ + "\u0421\u0001\u0000\u0000\u0000\u0421\u0422\u0006\u0083\b\u0000\u0422\u0112"+ + "\u0001\u0000\u0000\u0000\u0423\u0424\u00033\u0014\u0000\u0424\u0425\u0001"+ + "\u0000\u0000\u0000\u0425\u0426\u0006\u0084\b\u0000\u0426\u0114\u0001\u0000"+ + "\u0000\u0000\u0427\u0428\u00035\u0015\u0000\u0428\u0429\u0001\u0000\u0000"+ + "\u0000\u0429\u042a\u0006\u0085\b\u0000\u042a\u0116\u0001\u0000\u0000\u0000"+ + "\u042b\u042c\u0003A\u001b\u0000\u042c\u042d\u0001\u0000\u0000\u0000\u042d"+ + "\u042e\u0006\u0086\u000b\u0000\u042e\u042f\u0006\u0086\f\u0000\u042f\u0118"+ + "\u0001\u0000\u0000\u0000\u0430\u0431\u0003i/\u0000\u0431\u0432\u0001\u0000"+ + "\u0000\u0000\u0432\u0433\u0006\u0087\u0011\u0000\u0433\u011a\u0001\u0000"+ + "\u0000\u0000\u0434\u0435\u0003\u00a5M\u0000\u0435\u0436\u0001\u0000\u0000"+ + "\u0000\u0436\u0437\u0006\u0088\u0010\u0000\u0437\u011c\u0001\u0000\u0000"+ + "\u0000\u0438\u0439\u0003\u00a3L\u0000\u0439\u043a\u0001\u0000\u0000\u0000"+ + "\u043a\u043b\u0006\u0089\u0017\u0000\u043b\u011e\u0001\u0000\u0000\u0000"+ + "\u043c\u043d\u00031\u0013\u0000\u043d\u043e\u0001\u0000\u0000\u0000\u043e"+ + "\u043f\u0006\u008a\b\u0000\u043f\u0120\u0001\u0000\u0000\u0000\u0440\u0441"+ + "\u00033\u0014\u0000\u0441\u0442\u0001\u0000\u0000\u0000\u0442\u0443\u0006"+ + "\u008b\b\u0000\u0443\u0122\u0001\u0000\u0000\u0000\u0444\u0445\u00035"+ + "\u0015\u0000\u0445\u0446\u0001\u0000\u0000\u0000\u0446\u0447\u0006\u008c"+ + 
"\b\u0000\u0447\u0124\u0001\u0000\u0000\u0000\u0448\u0449\u0003A\u001b"+ + "\u0000\u0449\u044a\u0001\u0000\u0000\u0000\u044a\u044b\u0006\u008d\u000b"+ + "\u0000\u044b\u044c\u0006\u008d\f\u0000\u044c\u0126\u0001\u0000\u0000\u0000"+ + "\u044d\u044e\u0005i\u0000\u0000\u044e\u044f\u0005n\u0000\u0000\u044f\u0450"+ + "\u0005f\u0000\u0000\u0450\u0451\u0005o\u0000\u0000\u0451\u0128\u0001\u0000"+ + "\u0000\u0000\u0452\u0453\u0005f\u0000\u0000\u0453\u0454\u0005u\u0000\u0000"+ + "\u0454\u0455\u0005n\u0000\u0000\u0455\u0456\u0005c\u0000\u0000\u0456\u0457"+ + "\u0005t\u0000\u0000\u0457\u0458\u0005i\u0000\u0000\u0458\u0459\u0005o"+ + "\u0000\u0000\u0459\u045a\u0005n\u0000\u0000\u045a\u045b\u0005s\u0000\u0000"+ + "\u045b\u012a\u0001\u0000\u0000\u0000\u045c\u045d\u00031\u0013\u0000\u045d"+ + "\u045e\u0001\u0000\u0000\u0000\u045e\u045f\u0006\u0090\b\u0000\u045f\u012c"+ + "\u0001\u0000\u0000\u0000\u0460\u0461\u00033\u0014\u0000\u0461\u0462\u0001"+ + "\u0000\u0000\u0000\u0462\u0463\u0006\u0091\b\u0000\u0463\u012e\u0001\u0000"+ + "\u0000\u0000\u0464\u0465\u00035\u0015\u0000\u0465\u0466\u0001\u0000\u0000"+ + "\u0000\u0466\u0467\u0006\u0092\b\u0000\u0467\u0130\u0001\u0000\u0000\u0000"+ + "\u0468\u0469\u0003\u00a1K\u0000\u0469\u046a\u0001\u0000\u0000\u0000\u046a"+ + "\u046b\u0006\u0093\r\u0000\u046b\u046c\u0006\u0093\f\u0000\u046c\u0132"+ + "\u0001\u0000\u0000\u0000\u046d\u046e\u0005:\u0000\u0000\u046e\u0134\u0001"+ + "\u0000\u0000\u0000\u046f\u0475\u0003M!\u0000\u0470\u0475\u0003C\u001c"+ + "\u0000\u0471\u0475\u0003i/\u0000\u0472\u0475\u0003E\u001d\u0000\u0473"+ + "\u0475\u0003S$\u0000\u0474\u046f\u0001\u0000\u0000\u0000\u0474\u0470\u0001"+ + "\u0000\u0000\u0000\u0474\u0471\u0001\u0000\u0000\u0000\u0474\u0472\u0001"+ + "\u0000\u0000\u0000\u0474\u0473\u0001\u0000\u0000\u0000\u0475\u0476\u0001"+ + "\u0000\u0000\u0000\u0476\u0474\u0001\u0000\u0000\u0000\u0476\u0477\u0001"+ + "\u0000\u0000\u0000\u0477\u0136\u0001\u0000\u0000\u0000\u0478\u0479\u0003"+ + "1\u0013\u0000\u0479\u047a\u0001\u0000\u0000\u0000\u047a\u047b\u0006\u0096"+ + "\b\u0000\u047b\u0138\u0001\u0000\u0000\u0000\u047c\u047d\u00033\u0014"+ + "\u0000\u047d\u047e\u0001\u0000\u0000\u0000\u047e\u047f\u0006\u0097\b\u0000"+ + "\u047f\u013a\u0001\u0000\u0000\u0000\u0480\u0481\u00035\u0015\u0000\u0481"+ + "\u0482\u0001\u0000\u0000\u0000\u0482\u0483\u0006\u0098\b\u0000\u0483\u013c"+ + "\u0001\u0000\u0000\u00006\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007"+ + "\b\t\n\u01d9\u01e3\u01e7\u01ea\u01f3\u01f5\u0200\u0229\u022e\u0237\u023e"+ + "\u0243\u0245\u0250\u0258\u025b\u025d\u0262\u0267\u026d\u0274\u0279\u027f"+ + "\u0282\u028a\u028e\u030d\u0312\u0317\u0319\u031f\u0350\u0355\u0378\u037c"+ + "\u0381\u0386\u038b\u038d\u03e5\u03ea\u0474\u0476\u0018\u0005\u0002\u0000"+ + "\u0005\u0004\u0000\u0005\u0006\u0000\u0005\u0001\u0000\u0005\u0003\u0000"+ + "\u0005\b\u0000\u0005\u0005\u0000\u0005\t\u0000\u0000\u0001\u0000\u0007"+ + "?\u0000\u0005\u0000\u0000\u0007\u001a\u0000\u0004\u0000\u0000\u0007@\u0000"+ + "\u0007\"\u0000\u0007!\u0000\u0007B\u0000\u0007$\u0000\u0007K\u0000\u0005"+ + "\n\u0000\u0005\u0007\u0000\u0007U\u0000\u0007T\u0000\u0007A\u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index 3acc73b1b592c..823e56b88b0dd 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -93,11 +93,17 @@ null null null null +null 'info' 'functions' null null null +':' +null +null +null +null token symbolic names: null @@ -175,7 +181,7 @@ FROM_UNQUOTED_IDENTIFIER FROM_LINE_COMMENT FROM_MULTILINE_COMMENT FROM_WS -PROJECT_UNQUOTED_IDENTIFIER +UNQUOTED_ID_PATTERN PROJECT_LINE_COMMENT PROJECT_MULTILINE_COMMENT PROJECT_WS @@ -185,6 +191,7 @@ RENAME_MULTILINE_COMMENT RENAME_WS ON WITH +ENRICH_POLICY_NAME ENRICH_LINE_COMMENT ENRICH_MULTILINE_COMMENT ENRICH_WS @@ -199,6 +206,11 @@ FUNCTIONS SHOW_LINE_COMMENT SHOW_MULTILINE_COMMENT SHOW_WS +COLON +SETTING +SETTING_LINE_COMMENT +SETTTING_MULTILINE_COMMENT +SETTING_WS rule names: singleStatement @@ -250,7 +262,8 @@ subqueryExpression showCommand enrichCommand enrichWithClause +setting atn: -[4, 1, 98, 519, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 108, 8, 1, 10, 1, 12, 1, 111, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 117, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 132, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 144, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 151, 8, 5, 10, 5, 12, 5, 154, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 161, 8, 5, 1, 5, 1, 5, 3, 5, 165, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 173, 8, 5, 10, 5, 12, 5, 176, 9, 5, 1, 6, 1, 6, 3, 6, 180, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 187, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 192, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 199, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 205, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 213, 8, 8, 10, 8, 12, 8, 216, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 225, 8, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 233, 8, 10, 10, 10, 12, 10, 236, 9, 10, 3, 10, 238, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 5, 12, 248, 8, 12, 10, 12, 12, 12, 251, 9, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 3, 13, 258, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 5, 14, 264, 8, 14, 10, 14, 12, 14, 267, 9, 14, 1, 14, 3, 14, 270, 8, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 5, 15, 277, 8, 15, 10, 15, 12, 15, 280, 9, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 3, 17, 289, 8, 17, 1, 17, 1, 17, 3, 17, 293, 8, 17, 1, 18, 1, 18, 1, 18, 1, 18, 3, 18, 299, 8, 18, 1, 19, 1, 19, 1, 19, 5, 19, 304, 8, 19, 10, 19, 12, 19, 307, 9, 19, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 5, 21, 314, 8, 21, 10, 21, 12, 21, 317, 9, 21, 1, 22, 1, 22, 1, 22, 5, 22, 322, 8, 22, 10, 22, 12, 22, 325, 9, 22, 1, 23, 1, 23, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 344, 8, 25, 10, 25, 12, 25, 347, 9, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 355, 8, 25, 10, 25, 12, 
25, 358, 9, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 366, 8, 25, 10, 25, 12, 25, 369, 9, 25, 1, 25, 1, 25, 3, 25, 373, 8, 25, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 5, 27, 382, 8, 27, 10, 27, 12, 27, 385, 9, 27, 1, 28, 1, 28, 3, 28, 389, 8, 28, 1, 28, 1, 28, 3, 28, 393, 8, 28, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 399, 8, 29, 10, 29, 12, 29, 402, 9, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 408, 8, 29, 10, 29, 12, 29, 411, 9, 29, 3, 29, 413, 8, 29, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 419, 8, 30, 10, 30, 12, 30, 422, 9, 30, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 428, 8, 31, 10, 31, 12, 31, 431, 9, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 1, 33, 3, 33, 441, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 5, 36, 453, 8, 36, 10, 36, 12, 36, 456, 9, 36, 1, 37, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 39, 1, 39, 3, 39, 466, 8, 39, 1, 40, 3, 40, 469, 8, 40, 1, 40, 1, 40, 1, 41, 3, 41, 474, 8, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 3, 46, 493, 8, 46, 1, 47, 1, 47, 1, 47, 1, 47, 3, 47, 499, 8, 47, 1, 47, 1, 47, 1, 47, 1, 47, 5, 47, 505, 8, 47, 10, 47, 12, 47, 508, 9, 47, 3, 47, 510, 8, 47, 1, 48, 1, 48, 1, 48, 3, 48, 515, 8, 48, 1, 48, 1, 48, 1, 48, 0, 3, 2, 10, 16, 49, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 0, 9, 1, 0, 58, 59, 1, 0, 60, 62, 2, 0, 66, 66, 71, 71, 1, 0, 65, 66, 2, 0, 66, 66, 75, 75, 2, 0, 32, 32, 35, 35, 1, 0, 38, 39, 2, 0, 37, 37, 51, 51, 1, 0, 52, 57, 548, 0, 98, 1, 0, 0, 0, 2, 101, 1, 0, 0, 0, 4, 116, 1, 0, 0, 0, 6, 131, 1, 0, 0, 0, 8, 133, 1, 0, 0, 0, 10, 164, 1, 0, 0, 0, 12, 191, 1, 0, 0, 0, 14, 198, 1, 0, 0, 0, 16, 204, 1, 0, 0, 0, 18, 224, 1, 0, 0, 0, 20, 226, 1, 0, 0, 0, 22, 241, 1, 0, 0, 0, 24, 244, 1, 0, 0, 0, 26, 257, 1, 0, 0, 0, 28, 259, 1, 0, 0, 0, 30, 271, 1, 0, 0, 0, 32, 283, 1, 0, 0, 0, 34, 286, 1, 0, 0, 0, 36, 294, 1, 0, 0, 0, 38, 300, 1, 0, 0, 0, 40, 308, 1, 0, 0, 0, 42, 310, 1, 0, 0, 0, 44, 318, 1, 0, 0, 0, 46, 326, 1, 0, 0, 0, 48, 328, 1, 0, 0, 0, 50, 372, 1, 0, 0, 0, 52, 374, 1, 0, 0, 0, 54, 377, 1, 0, 0, 0, 56, 386, 1, 0, 0, 0, 58, 412, 1, 0, 0, 0, 60, 414, 1, 0, 0, 0, 62, 423, 1, 0, 0, 0, 64, 432, 1, 0, 0, 0, 66, 436, 1, 0, 0, 0, 68, 442, 1, 0, 0, 0, 70, 446, 1, 0, 0, 0, 72, 449, 1, 0, 0, 0, 74, 457, 1, 0, 0, 0, 76, 461, 1, 0, 0, 0, 78, 465, 1, 0, 0, 0, 80, 468, 1, 0, 0, 0, 82, 473, 1, 0, 0, 0, 84, 477, 1, 0, 0, 0, 86, 479, 1, 0, 0, 0, 88, 481, 1, 0, 0, 0, 90, 484, 1, 0, 0, 0, 92, 492, 1, 0, 0, 0, 94, 494, 1, 0, 0, 0, 96, 514, 1, 0, 0, 0, 98, 99, 3, 2, 1, 0, 99, 100, 5, 0, 0, 1, 100, 1, 1, 0, 0, 0, 101, 102, 6, 1, -1, 0, 102, 103, 3, 4, 2, 0, 103, 109, 1, 0, 0, 0, 104, 105, 10, 1, 0, 0, 105, 106, 5, 26, 0, 0, 106, 108, 3, 6, 3, 0, 107, 104, 1, 0, 0, 0, 108, 111, 1, 0, 0, 0, 109, 107, 1, 0, 0, 0, 109, 110, 1, 0, 0, 0, 110, 3, 1, 0, 0, 0, 111, 109, 1, 0, 0, 0, 112, 117, 3, 88, 44, 0, 113, 117, 3, 28, 14, 0, 114, 117, 3, 22, 11, 0, 115, 117, 3, 92, 46, 0, 116, 112, 1, 0, 0, 0, 116, 113, 1, 0, 0, 0, 116, 114, 1, 0, 0, 0, 116, 115, 1, 0, 0, 0, 117, 5, 1, 0, 0, 0, 118, 132, 3, 32, 16, 0, 119, 132, 3, 36, 18, 0, 120, 132, 3, 52, 26, 0, 121, 132, 3, 58, 29, 0, 122, 132, 3, 54, 27, 0, 123, 132, 3, 34, 17, 0, 124, 132, 3, 8, 4, 0, 125, 132, 3, 60, 30, 0, 126, 132, 3, 62, 31, 0, 127, 132, 3, 66, 33, 0, 128, 132, 3, 68, 34, 0, 129, 132, 3, 94, 47, 0, 130, 132, 3, 70, 35, 0, 131, 118, 1, 0, 0, 
0, 131, 119, 1, 0, 0, 0, 131, 120, 1, 0, 0, 0, 131, 121, 1, 0, 0, 0, 131, 122, 1, 0, 0, 0, 131, 123, 1, 0, 0, 0, 131, 124, 1, 0, 0, 0, 131, 125, 1, 0, 0, 0, 131, 126, 1, 0, 0, 0, 131, 127, 1, 0, 0, 0, 131, 128, 1, 0, 0, 0, 131, 129, 1, 0, 0, 0, 131, 130, 1, 0, 0, 0, 132, 7, 1, 0, 0, 0, 133, 134, 5, 18, 0, 0, 134, 135, 3, 10, 5, 0, 135, 9, 1, 0, 0, 0, 136, 137, 6, 5, -1, 0, 137, 138, 5, 44, 0, 0, 138, 165, 3, 10, 5, 7, 139, 165, 3, 14, 7, 0, 140, 165, 3, 12, 6, 0, 141, 143, 3, 14, 7, 0, 142, 144, 5, 44, 0, 0, 143, 142, 1, 0, 0, 0, 143, 144, 1, 0, 0, 0, 144, 145, 1, 0, 0, 0, 145, 146, 5, 41, 0, 0, 146, 147, 5, 40, 0, 0, 147, 152, 3, 14, 7, 0, 148, 149, 5, 34, 0, 0, 149, 151, 3, 14, 7, 0, 150, 148, 1, 0, 0, 0, 151, 154, 1, 0, 0, 0, 152, 150, 1, 0, 0, 0, 152, 153, 1, 0, 0, 0, 153, 155, 1, 0, 0, 0, 154, 152, 1, 0, 0, 0, 155, 156, 5, 50, 0, 0, 156, 165, 1, 0, 0, 0, 157, 158, 3, 14, 7, 0, 158, 160, 5, 42, 0, 0, 159, 161, 5, 44, 0, 0, 160, 159, 1, 0, 0, 0, 160, 161, 1, 0, 0, 0, 161, 162, 1, 0, 0, 0, 162, 163, 5, 45, 0, 0, 163, 165, 1, 0, 0, 0, 164, 136, 1, 0, 0, 0, 164, 139, 1, 0, 0, 0, 164, 140, 1, 0, 0, 0, 164, 141, 1, 0, 0, 0, 164, 157, 1, 0, 0, 0, 165, 174, 1, 0, 0, 0, 166, 167, 10, 4, 0, 0, 167, 168, 5, 31, 0, 0, 168, 173, 3, 10, 5, 5, 169, 170, 10, 3, 0, 0, 170, 171, 5, 47, 0, 0, 171, 173, 3, 10, 5, 4, 172, 166, 1, 0, 0, 0, 172, 169, 1, 0, 0, 0, 173, 176, 1, 0, 0, 0, 174, 172, 1, 0, 0, 0, 174, 175, 1, 0, 0, 0, 175, 11, 1, 0, 0, 0, 176, 174, 1, 0, 0, 0, 177, 179, 3, 14, 7, 0, 178, 180, 5, 44, 0, 0, 179, 178, 1, 0, 0, 0, 179, 180, 1, 0, 0, 0, 180, 181, 1, 0, 0, 0, 181, 182, 5, 43, 0, 0, 182, 183, 3, 84, 42, 0, 183, 192, 1, 0, 0, 0, 184, 186, 3, 14, 7, 0, 185, 187, 5, 44, 0, 0, 186, 185, 1, 0, 0, 0, 186, 187, 1, 0, 0, 0, 187, 188, 1, 0, 0, 0, 188, 189, 5, 49, 0, 0, 189, 190, 3, 84, 42, 0, 190, 192, 1, 0, 0, 0, 191, 177, 1, 0, 0, 0, 191, 184, 1, 0, 0, 0, 192, 13, 1, 0, 0, 0, 193, 199, 3, 16, 8, 0, 194, 195, 3, 16, 8, 0, 195, 196, 3, 86, 43, 0, 196, 197, 3, 16, 8, 0, 197, 199, 1, 0, 0, 0, 198, 193, 1, 0, 0, 0, 198, 194, 1, 0, 0, 0, 199, 15, 1, 0, 0, 0, 200, 201, 6, 8, -1, 0, 201, 205, 3, 18, 9, 0, 202, 203, 7, 0, 0, 0, 203, 205, 3, 16, 8, 3, 204, 200, 1, 0, 0, 0, 204, 202, 1, 0, 0, 0, 205, 214, 1, 0, 0, 0, 206, 207, 10, 2, 0, 0, 207, 208, 7, 1, 0, 0, 208, 213, 3, 16, 8, 3, 209, 210, 10, 1, 0, 0, 210, 211, 7, 0, 0, 0, 211, 213, 3, 16, 8, 2, 212, 206, 1, 0, 0, 0, 212, 209, 1, 0, 0, 0, 213, 216, 1, 0, 0, 0, 214, 212, 1, 0, 0, 0, 214, 215, 1, 0, 0, 0, 215, 17, 1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 217, 225, 3, 50, 25, 0, 218, 225, 3, 42, 21, 0, 219, 225, 3, 20, 10, 0, 220, 221, 5, 40, 0, 0, 221, 222, 3, 10, 5, 0, 222, 223, 5, 50, 0, 0, 223, 225, 1, 0, 0, 0, 224, 217, 1, 0, 0, 0, 224, 218, 1, 0, 0, 0, 224, 219, 1, 0, 0, 0, 224, 220, 1, 0, 0, 0, 225, 19, 1, 0, 0, 0, 226, 227, 3, 46, 23, 0, 227, 237, 5, 40, 0, 0, 228, 238, 5, 60, 0, 0, 229, 234, 3, 10, 5, 0, 230, 231, 5, 34, 0, 0, 231, 233, 3, 10, 5, 0, 232, 230, 1, 0, 0, 0, 233, 236, 1, 0, 0, 0, 234, 232, 1, 0, 0, 0, 234, 235, 1, 0, 0, 0, 235, 238, 1, 0, 0, 0, 236, 234, 1, 0, 0, 0, 237, 228, 1, 0, 0, 0, 237, 229, 1, 0, 0, 0, 237, 238, 1, 0, 0, 0, 238, 239, 1, 0, 0, 0, 239, 240, 5, 50, 0, 0, 240, 21, 1, 0, 0, 0, 241, 242, 5, 14, 0, 0, 242, 243, 3, 24, 12, 0, 243, 23, 1, 0, 0, 0, 244, 249, 3, 26, 13, 0, 245, 246, 5, 34, 0, 0, 246, 248, 3, 26, 13, 0, 247, 245, 1, 0, 0, 0, 248, 251, 1, 0, 0, 0, 249, 247, 1, 0, 0, 0, 249, 250, 1, 0, 0, 0, 250, 25, 1, 0, 0, 0, 251, 249, 1, 0, 0, 0, 252, 258, 3, 10, 5, 0, 253, 254, 3, 42, 21, 0, 254, 255, 5, 33, 0, 0, 255, 256, 
3, 10, 5, 0, 256, 258, 1, 0, 0, 0, 257, 252, 1, 0, 0, 0, 257, 253, 1, 0, 0, 0, 258, 27, 1, 0, 0, 0, 259, 260, 5, 6, 0, 0, 260, 265, 3, 40, 20, 0, 261, 262, 5, 34, 0, 0, 262, 264, 3, 40, 20, 0, 263, 261, 1, 0, 0, 0, 264, 267, 1, 0, 0, 0, 265, 263, 1, 0, 0, 0, 265, 266, 1, 0, 0, 0, 266, 269, 1, 0, 0, 0, 267, 265, 1, 0, 0, 0, 268, 270, 3, 30, 15, 0, 269, 268, 1, 0, 0, 0, 269, 270, 1, 0, 0, 0, 270, 29, 1, 0, 0, 0, 271, 272, 5, 63, 0, 0, 272, 273, 5, 70, 0, 0, 273, 278, 3, 40, 20, 0, 274, 275, 5, 34, 0, 0, 275, 277, 3, 40, 20, 0, 276, 274, 1, 0, 0, 0, 277, 280, 1, 0, 0, 0, 278, 276, 1, 0, 0, 0, 278, 279, 1, 0, 0, 0, 279, 281, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 281, 282, 5, 64, 0, 0, 282, 31, 1, 0, 0, 0, 283, 284, 5, 4, 0, 0, 284, 285, 3, 24, 12, 0, 285, 33, 1, 0, 0, 0, 286, 288, 5, 17, 0, 0, 287, 289, 3, 24, 12, 0, 288, 287, 1, 0, 0, 0, 288, 289, 1, 0, 0, 0, 289, 292, 1, 0, 0, 0, 290, 291, 5, 30, 0, 0, 291, 293, 3, 38, 19, 0, 292, 290, 1, 0, 0, 0, 292, 293, 1, 0, 0, 0, 293, 35, 1, 0, 0, 0, 294, 295, 5, 8, 0, 0, 295, 298, 3, 24, 12, 0, 296, 297, 5, 30, 0, 0, 297, 299, 3, 38, 19, 0, 298, 296, 1, 0, 0, 0, 298, 299, 1, 0, 0, 0, 299, 37, 1, 0, 0, 0, 300, 305, 3, 42, 21, 0, 301, 302, 5, 34, 0, 0, 302, 304, 3, 42, 21, 0, 303, 301, 1, 0, 0, 0, 304, 307, 1, 0, 0, 0, 305, 303, 1, 0, 0, 0, 305, 306, 1, 0, 0, 0, 306, 39, 1, 0, 0, 0, 307, 305, 1, 0, 0, 0, 308, 309, 7, 2, 0, 0, 309, 41, 1, 0, 0, 0, 310, 315, 3, 46, 23, 0, 311, 312, 5, 36, 0, 0, 312, 314, 3, 46, 23, 0, 313, 311, 1, 0, 0, 0, 314, 317, 1, 0, 0, 0, 315, 313, 1, 0, 0, 0, 315, 316, 1, 0, 0, 0, 316, 43, 1, 0, 0, 0, 317, 315, 1, 0, 0, 0, 318, 323, 3, 48, 24, 0, 319, 320, 5, 36, 0, 0, 320, 322, 3, 48, 24, 0, 321, 319, 1, 0, 0, 0, 322, 325, 1, 0, 0, 0, 323, 321, 1, 0, 0, 0, 323, 324, 1, 0, 0, 0, 324, 45, 1, 0, 0, 0, 325, 323, 1, 0, 0, 0, 326, 327, 7, 3, 0, 0, 327, 47, 1, 0, 0, 0, 328, 329, 7, 4, 0, 0, 329, 49, 1, 0, 0, 0, 330, 373, 5, 45, 0, 0, 331, 332, 3, 82, 41, 0, 332, 333, 5, 65, 0, 0, 333, 373, 1, 0, 0, 0, 334, 373, 3, 80, 40, 0, 335, 373, 3, 82, 41, 0, 336, 373, 3, 76, 38, 0, 337, 373, 5, 48, 0, 0, 338, 373, 3, 84, 42, 0, 339, 340, 5, 63, 0, 0, 340, 345, 3, 78, 39, 0, 341, 342, 5, 34, 0, 0, 342, 344, 3, 78, 39, 0, 343, 341, 1, 0, 0, 0, 344, 347, 1, 0, 0, 0, 345, 343, 1, 0, 0, 0, 345, 346, 1, 0, 0, 0, 346, 348, 1, 0, 0, 0, 347, 345, 1, 0, 0, 0, 348, 349, 5, 64, 0, 0, 349, 373, 1, 0, 0, 0, 350, 351, 5, 63, 0, 0, 351, 356, 3, 76, 38, 0, 352, 353, 5, 34, 0, 0, 353, 355, 3, 76, 38, 0, 354, 352, 1, 0, 0, 0, 355, 358, 1, 0, 0, 0, 356, 354, 1, 0, 0, 0, 356, 357, 1, 0, 0, 0, 357, 359, 1, 0, 0, 0, 358, 356, 1, 0, 0, 0, 359, 360, 5, 64, 0, 0, 360, 373, 1, 0, 0, 0, 361, 362, 5, 63, 0, 0, 362, 367, 3, 84, 42, 0, 363, 364, 5, 34, 0, 0, 364, 366, 3, 84, 42, 0, 365, 363, 1, 0, 0, 0, 366, 369, 1, 0, 0, 0, 367, 365, 1, 0, 0, 0, 367, 368, 1, 0, 0, 0, 368, 370, 1, 0, 0, 0, 369, 367, 1, 0, 0, 0, 370, 371, 5, 64, 0, 0, 371, 373, 1, 0, 0, 0, 372, 330, 1, 0, 0, 0, 372, 331, 1, 0, 0, 0, 372, 334, 1, 0, 0, 0, 372, 335, 1, 0, 0, 0, 372, 336, 1, 0, 0, 0, 372, 337, 1, 0, 0, 0, 372, 338, 1, 0, 0, 0, 372, 339, 1, 0, 0, 0, 372, 350, 1, 0, 0, 0, 372, 361, 1, 0, 0, 0, 373, 51, 1, 0, 0, 0, 374, 375, 5, 10, 0, 0, 375, 376, 5, 28, 0, 0, 376, 53, 1, 0, 0, 0, 377, 378, 5, 16, 0, 0, 378, 383, 3, 56, 28, 0, 379, 380, 5, 34, 0, 0, 380, 382, 3, 56, 28, 0, 381, 379, 1, 0, 0, 0, 382, 385, 1, 0, 0, 0, 383, 381, 1, 0, 0, 0, 383, 384, 1, 0, 0, 0, 384, 55, 1, 0, 0, 0, 385, 383, 1, 0, 0, 0, 386, 388, 3, 10, 5, 0, 387, 389, 7, 5, 0, 0, 388, 387, 1, 0, 0, 0, 388, 389, 1, 0, 0, 0, 389, 392, 1, 0, 
0, 0, 390, 391, 5, 46, 0, 0, 391, 393, 7, 6, 0, 0, 392, 390, 1, 0, 0, 0, 392, 393, 1, 0, 0, 0, 393, 57, 1, 0, 0, 0, 394, 395, 5, 9, 0, 0, 395, 400, 3, 44, 22, 0, 396, 397, 5, 34, 0, 0, 397, 399, 3, 44, 22, 0, 398, 396, 1, 0, 0, 0, 399, 402, 1, 0, 0, 0, 400, 398, 1, 0, 0, 0, 400, 401, 1, 0, 0, 0, 401, 413, 1, 0, 0, 0, 402, 400, 1, 0, 0, 0, 403, 404, 5, 12, 0, 0, 404, 409, 3, 44, 22, 0, 405, 406, 5, 34, 0, 0, 406, 408, 3, 44, 22, 0, 407, 405, 1, 0, 0, 0, 408, 411, 1, 0, 0, 0, 409, 407, 1, 0, 0, 0, 409, 410, 1, 0, 0, 0, 410, 413, 1, 0, 0, 0, 411, 409, 1, 0, 0, 0, 412, 394, 1, 0, 0, 0, 412, 403, 1, 0, 0, 0, 413, 59, 1, 0, 0, 0, 414, 415, 5, 2, 0, 0, 415, 420, 3, 44, 22, 0, 416, 417, 5, 34, 0, 0, 417, 419, 3, 44, 22, 0, 418, 416, 1, 0, 0, 0, 419, 422, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 420, 421, 1, 0, 0, 0, 421, 61, 1, 0, 0, 0, 422, 420, 1, 0, 0, 0, 423, 424, 5, 13, 0, 0, 424, 429, 3, 64, 32, 0, 425, 426, 5, 34, 0, 0, 426, 428, 3, 64, 32, 0, 427, 425, 1, 0, 0, 0, 428, 431, 1, 0, 0, 0, 429, 427, 1, 0, 0, 0, 429, 430, 1, 0, 0, 0, 430, 63, 1, 0, 0, 0, 431, 429, 1, 0, 0, 0, 432, 433, 3, 44, 22, 0, 433, 434, 5, 79, 0, 0, 434, 435, 3, 44, 22, 0, 435, 65, 1, 0, 0, 0, 436, 437, 5, 1, 0, 0, 437, 438, 3, 18, 9, 0, 438, 440, 3, 84, 42, 0, 439, 441, 3, 72, 36, 0, 440, 439, 1, 0, 0, 0, 440, 441, 1, 0, 0, 0, 441, 67, 1, 0, 0, 0, 442, 443, 5, 7, 0, 0, 443, 444, 3, 18, 9, 0, 444, 445, 3, 84, 42, 0, 445, 69, 1, 0, 0, 0, 446, 447, 5, 11, 0, 0, 447, 448, 3, 42, 21, 0, 448, 71, 1, 0, 0, 0, 449, 454, 3, 74, 37, 0, 450, 451, 5, 34, 0, 0, 451, 453, 3, 74, 37, 0, 452, 450, 1, 0, 0, 0, 453, 456, 1, 0, 0, 0, 454, 452, 1, 0, 0, 0, 454, 455, 1, 0, 0, 0, 455, 73, 1, 0, 0, 0, 456, 454, 1, 0, 0, 0, 457, 458, 3, 46, 23, 0, 458, 459, 5, 33, 0, 0, 459, 460, 3, 50, 25, 0, 460, 75, 1, 0, 0, 0, 461, 462, 7, 7, 0, 0, 462, 77, 1, 0, 0, 0, 463, 466, 3, 80, 40, 0, 464, 466, 3, 82, 41, 0, 465, 463, 1, 0, 0, 0, 465, 464, 1, 0, 0, 0, 466, 79, 1, 0, 0, 0, 467, 469, 7, 0, 0, 0, 468, 467, 1, 0, 0, 0, 468, 469, 1, 0, 0, 0, 469, 470, 1, 0, 0, 0, 470, 471, 5, 29, 0, 0, 471, 81, 1, 0, 0, 0, 472, 474, 7, 0, 0, 0, 473, 472, 1, 0, 0, 0, 473, 474, 1, 0, 0, 0, 474, 475, 1, 0, 0, 0, 475, 476, 5, 28, 0, 0, 476, 83, 1, 0, 0, 0, 477, 478, 5, 27, 0, 0, 478, 85, 1, 0, 0, 0, 479, 480, 7, 8, 0, 0, 480, 87, 1, 0, 0, 0, 481, 482, 5, 5, 0, 0, 482, 483, 3, 90, 45, 0, 483, 89, 1, 0, 0, 0, 484, 485, 5, 63, 0, 0, 485, 486, 3, 2, 1, 0, 486, 487, 5, 64, 0, 0, 487, 91, 1, 0, 0, 0, 488, 489, 5, 15, 0, 0, 489, 493, 5, 94, 0, 0, 490, 491, 5, 15, 0, 0, 491, 493, 5, 95, 0, 0, 492, 488, 1, 0, 0, 0, 492, 490, 1, 0, 0, 0, 493, 93, 1, 0, 0, 0, 494, 495, 5, 3, 0, 0, 495, 498, 3, 40, 20, 0, 496, 497, 5, 83, 0, 0, 497, 499, 3, 44, 22, 0, 498, 496, 1, 0, 0, 0, 498, 499, 1, 0, 0, 0, 499, 509, 1, 0, 0, 0, 500, 501, 5, 84, 0, 0, 501, 506, 3, 96, 48, 0, 502, 503, 5, 34, 0, 0, 503, 505, 3, 96, 48, 0, 504, 502, 1, 0, 0, 0, 505, 508, 1, 0, 0, 0, 506, 504, 1, 0, 0, 0, 506, 507, 1, 0, 0, 0, 507, 510, 1, 0, 0, 0, 508, 506, 1, 0, 0, 0, 509, 500, 1, 0, 0, 0, 509, 510, 1, 0, 0, 0, 510, 95, 1, 0, 0, 0, 511, 512, 3, 44, 22, 0, 512, 513, 5, 33, 0, 0, 513, 515, 1, 0, 0, 0, 514, 511, 1, 0, 0, 0, 514, 515, 1, 0, 0, 0, 515, 516, 1, 0, 0, 0, 516, 517, 3, 44, 22, 0, 517, 97, 1, 0, 0, 0, 52, 109, 116, 131, 143, 152, 160, 164, 172, 174, 179, 186, 191, 198, 204, 212, 214, 224, 234, 237, 249, 257, 265, 269, 278, 288, 292, 298, 305, 315, 323, 345, 356, 367, 372, 383, 388, 392, 400, 409, 412, 420, 429, 440, 454, 465, 468, 473, 492, 498, 506, 509, 514] \ No newline at end of file +[4, 1, 104, 533, 2, 0, 7, 0, 
2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 110, 8, 1, 10, 1, 12, 1, 113, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 119, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 134, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 146, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 153, 8, 5, 10, 5, 12, 5, 156, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 163, 8, 5, 1, 5, 1, 5, 3, 5, 167, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 175, 8, 5, 10, 5, 12, 5, 178, 9, 5, 1, 6, 1, 6, 3, 6, 182, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 189, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 194, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 201, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 207, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 215, 8, 8, 10, 8, 12, 8, 218, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 227, 8, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 235, 8, 10, 10, 10, 12, 10, 238, 9, 10, 3, 10, 240, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 5, 12, 250, 8, 12, 10, 12, 12, 12, 253, 9, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 3, 13, 260, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 5, 14, 266, 8, 14, 10, 14, 12, 14, 269, 9, 14, 1, 14, 3, 14, 272, 8, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 5, 15, 279, 8, 15, 10, 15, 12, 15, 282, 9, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 3, 17, 291, 8, 17, 1, 17, 1, 17, 3, 17, 295, 8, 17, 1, 18, 1, 18, 1, 18, 1, 18, 3, 18, 301, 8, 18, 1, 19, 1, 19, 1, 19, 5, 19, 306, 8, 19, 10, 19, 12, 19, 309, 9, 19, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 5, 21, 316, 8, 21, 10, 21, 12, 21, 319, 9, 21, 1, 22, 1, 22, 1, 22, 5, 22, 324, 8, 22, 10, 22, 12, 22, 327, 9, 22, 1, 23, 1, 23, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 346, 8, 25, 10, 25, 12, 25, 349, 9, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 357, 8, 25, 10, 25, 12, 25, 360, 9, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 368, 8, 25, 10, 25, 12, 25, 371, 9, 25, 1, 25, 1, 25, 3, 25, 375, 8, 25, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 5, 27, 384, 8, 27, 10, 27, 12, 27, 387, 9, 27, 1, 28, 1, 28, 3, 28, 391, 8, 28, 1, 28, 1, 28, 3, 28, 395, 8, 28, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 401, 8, 29, 10, 29, 12, 29, 404, 9, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 410, 8, 29, 10, 29, 12, 29, 413, 9, 29, 3, 29, 415, 8, 29, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 421, 8, 30, 10, 30, 12, 30, 424, 9, 30, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 430, 8, 31, 10, 31, 12, 31, 433, 9, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 1, 33, 3, 33, 443, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 5, 36, 455, 8, 36, 10, 36, 12, 36, 458, 9, 36, 1, 37, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 39, 1, 39, 3, 39, 468, 8, 39, 1, 40, 3, 40, 471, 8, 40, 1, 40, 1, 40, 1, 41, 3, 41, 476, 8, 41, 1, 41, 1, 
41, 1, 42, 1, 42, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 3, 46, 495, 8, 46, 1, 47, 1, 47, 5, 47, 499, 8, 47, 10, 47, 12, 47, 502, 9, 47, 1, 47, 1, 47, 1, 47, 3, 47, 507, 8, 47, 1, 47, 1, 47, 1, 47, 1, 47, 5, 47, 513, 8, 47, 10, 47, 12, 47, 516, 9, 47, 3, 47, 518, 8, 47, 1, 48, 1, 48, 1, 48, 3, 48, 523, 8, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 0, 3, 2, 10, 16, 50, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 0, 9, 1, 0, 58, 59, 1, 0, 60, 62, 2, 0, 66, 66, 71, 71, 1, 0, 65, 66, 2, 0, 66, 66, 75, 75, 2, 0, 32, 32, 35, 35, 1, 0, 38, 39, 2, 0, 37, 37, 51, 51, 1, 0, 52, 57, 562, 0, 100, 1, 0, 0, 0, 2, 103, 1, 0, 0, 0, 4, 118, 1, 0, 0, 0, 6, 133, 1, 0, 0, 0, 8, 135, 1, 0, 0, 0, 10, 166, 1, 0, 0, 0, 12, 193, 1, 0, 0, 0, 14, 200, 1, 0, 0, 0, 16, 206, 1, 0, 0, 0, 18, 226, 1, 0, 0, 0, 20, 228, 1, 0, 0, 0, 22, 243, 1, 0, 0, 0, 24, 246, 1, 0, 0, 0, 26, 259, 1, 0, 0, 0, 28, 261, 1, 0, 0, 0, 30, 273, 1, 0, 0, 0, 32, 285, 1, 0, 0, 0, 34, 288, 1, 0, 0, 0, 36, 296, 1, 0, 0, 0, 38, 302, 1, 0, 0, 0, 40, 310, 1, 0, 0, 0, 42, 312, 1, 0, 0, 0, 44, 320, 1, 0, 0, 0, 46, 328, 1, 0, 0, 0, 48, 330, 1, 0, 0, 0, 50, 374, 1, 0, 0, 0, 52, 376, 1, 0, 0, 0, 54, 379, 1, 0, 0, 0, 56, 388, 1, 0, 0, 0, 58, 414, 1, 0, 0, 0, 60, 416, 1, 0, 0, 0, 62, 425, 1, 0, 0, 0, 64, 434, 1, 0, 0, 0, 66, 438, 1, 0, 0, 0, 68, 444, 1, 0, 0, 0, 70, 448, 1, 0, 0, 0, 72, 451, 1, 0, 0, 0, 74, 459, 1, 0, 0, 0, 76, 463, 1, 0, 0, 0, 78, 467, 1, 0, 0, 0, 80, 470, 1, 0, 0, 0, 82, 475, 1, 0, 0, 0, 84, 479, 1, 0, 0, 0, 86, 481, 1, 0, 0, 0, 88, 483, 1, 0, 0, 0, 90, 486, 1, 0, 0, 0, 92, 494, 1, 0, 0, 0, 94, 496, 1, 0, 0, 0, 96, 522, 1, 0, 0, 0, 98, 526, 1, 0, 0, 0, 100, 101, 3, 2, 1, 0, 101, 102, 5, 0, 0, 1, 102, 1, 1, 0, 0, 0, 103, 104, 6, 1, -1, 0, 104, 105, 3, 4, 2, 0, 105, 111, 1, 0, 0, 0, 106, 107, 10, 1, 0, 0, 107, 108, 5, 26, 0, 0, 108, 110, 3, 6, 3, 0, 109, 106, 1, 0, 0, 0, 110, 113, 1, 0, 0, 0, 111, 109, 1, 0, 0, 0, 111, 112, 1, 0, 0, 0, 112, 3, 1, 0, 0, 0, 113, 111, 1, 0, 0, 0, 114, 119, 3, 88, 44, 0, 115, 119, 3, 28, 14, 0, 116, 119, 3, 22, 11, 0, 117, 119, 3, 92, 46, 0, 118, 114, 1, 0, 0, 0, 118, 115, 1, 0, 0, 0, 118, 116, 1, 0, 0, 0, 118, 117, 1, 0, 0, 0, 119, 5, 1, 0, 0, 0, 120, 134, 3, 32, 16, 0, 121, 134, 3, 36, 18, 0, 122, 134, 3, 52, 26, 0, 123, 134, 3, 58, 29, 0, 124, 134, 3, 54, 27, 0, 125, 134, 3, 34, 17, 0, 126, 134, 3, 8, 4, 0, 127, 134, 3, 60, 30, 0, 128, 134, 3, 62, 31, 0, 129, 134, 3, 66, 33, 0, 130, 134, 3, 68, 34, 0, 131, 134, 3, 94, 47, 0, 132, 134, 3, 70, 35, 0, 133, 120, 1, 0, 0, 0, 133, 121, 1, 0, 0, 0, 133, 122, 1, 0, 0, 0, 133, 123, 1, 0, 0, 0, 133, 124, 1, 0, 0, 0, 133, 125, 1, 0, 0, 0, 133, 126, 1, 0, 0, 0, 133, 127, 1, 0, 0, 0, 133, 128, 1, 0, 0, 0, 133, 129, 1, 0, 0, 0, 133, 130, 1, 0, 0, 0, 133, 131, 1, 0, 0, 0, 133, 132, 1, 0, 0, 0, 134, 7, 1, 0, 0, 0, 135, 136, 5, 18, 0, 0, 136, 137, 3, 10, 5, 0, 137, 9, 1, 0, 0, 0, 138, 139, 6, 5, -1, 0, 139, 140, 5, 44, 0, 0, 140, 167, 3, 10, 5, 7, 141, 167, 3, 14, 7, 0, 142, 167, 3, 12, 6, 0, 143, 145, 3, 14, 7, 0, 144, 146, 5, 44, 0, 0, 145, 144, 1, 0, 0, 0, 145, 146, 1, 0, 0, 0, 146, 147, 1, 0, 0, 0, 147, 148, 5, 41, 0, 0, 148, 149, 5, 40, 0, 0, 149, 154, 3, 14, 7, 0, 150, 151, 5, 34, 0, 0, 151, 153, 3, 14, 7, 0, 152, 150, 1, 0, 0, 0, 153, 156, 1, 0, 0, 0, 154, 152, 1, 0, 0, 0, 154, 155, 1, 0, 0, 0, 155, 157, 1, 0, 0, 0, 156, 154, 1, 0, 0, 0, 157, 158, 
5, 50, 0, 0, 158, 167, 1, 0, 0, 0, 159, 160, 3, 14, 7, 0, 160, 162, 5, 42, 0, 0, 161, 163, 5, 44, 0, 0, 162, 161, 1, 0, 0, 0, 162, 163, 1, 0, 0, 0, 163, 164, 1, 0, 0, 0, 164, 165, 5, 45, 0, 0, 165, 167, 1, 0, 0, 0, 166, 138, 1, 0, 0, 0, 166, 141, 1, 0, 0, 0, 166, 142, 1, 0, 0, 0, 166, 143, 1, 0, 0, 0, 166, 159, 1, 0, 0, 0, 167, 176, 1, 0, 0, 0, 168, 169, 10, 4, 0, 0, 169, 170, 5, 31, 0, 0, 170, 175, 3, 10, 5, 5, 171, 172, 10, 3, 0, 0, 172, 173, 5, 47, 0, 0, 173, 175, 3, 10, 5, 4, 174, 168, 1, 0, 0, 0, 174, 171, 1, 0, 0, 0, 175, 178, 1, 0, 0, 0, 176, 174, 1, 0, 0, 0, 176, 177, 1, 0, 0, 0, 177, 11, 1, 0, 0, 0, 178, 176, 1, 0, 0, 0, 179, 181, 3, 14, 7, 0, 180, 182, 5, 44, 0, 0, 181, 180, 1, 0, 0, 0, 181, 182, 1, 0, 0, 0, 182, 183, 1, 0, 0, 0, 183, 184, 5, 43, 0, 0, 184, 185, 3, 84, 42, 0, 185, 194, 1, 0, 0, 0, 186, 188, 3, 14, 7, 0, 187, 189, 5, 44, 0, 0, 188, 187, 1, 0, 0, 0, 188, 189, 1, 0, 0, 0, 189, 190, 1, 0, 0, 0, 190, 191, 5, 49, 0, 0, 191, 192, 3, 84, 42, 0, 192, 194, 1, 0, 0, 0, 193, 179, 1, 0, 0, 0, 193, 186, 1, 0, 0, 0, 194, 13, 1, 0, 0, 0, 195, 201, 3, 16, 8, 0, 196, 197, 3, 16, 8, 0, 197, 198, 3, 86, 43, 0, 198, 199, 3, 16, 8, 0, 199, 201, 1, 0, 0, 0, 200, 195, 1, 0, 0, 0, 200, 196, 1, 0, 0, 0, 201, 15, 1, 0, 0, 0, 202, 203, 6, 8, -1, 0, 203, 207, 3, 18, 9, 0, 204, 205, 7, 0, 0, 0, 205, 207, 3, 16, 8, 3, 206, 202, 1, 0, 0, 0, 206, 204, 1, 0, 0, 0, 207, 216, 1, 0, 0, 0, 208, 209, 10, 2, 0, 0, 209, 210, 7, 1, 0, 0, 210, 215, 3, 16, 8, 3, 211, 212, 10, 1, 0, 0, 212, 213, 7, 0, 0, 0, 213, 215, 3, 16, 8, 2, 214, 208, 1, 0, 0, 0, 214, 211, 1, 0, 0, 0, 215, 218, 1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 216, 217, 1, 0, 0, 0, 217, 17, 1, 0, 0, 0, 218, 216, 1, 0, 0, 0, 219, 227, 3, 50, 25, 0, 220, 227, 3, 42, 21, 0, 221, 227, 3, 20, 10, 0, 222, 223, 5, 40, 0, 0, 223, 224, 3, 10, 5, 0, 224, 225, 5, 50, 0, 0, 225, 227, 1, 0, 0, 0, 226, 219, 1, 0, 0, 0, 226, 220, 1, 0, 0, 0, 226, 221, 1, 0, 0, 0, 226, 222, 1, 0, 0, 0, 227, 19, 1, 0, 0, 0, 228, 229, 3, 46, 23, 0, 229, 239, 5, 40, 0, 0, 230, 240, 5, 60, 0, 0, 231, 236, 3, 10, 5, 0, 232, 233, 5, 34, 0, 0, 233, 235, 3, 10, 5, 0, 234, 232, 1, 0, 0, 0, 235, 238, 1, 0, 0, 0, 236, 234, 1, 0, 0, 0, 236, 237, 1, 0, 0, 0, 237, 240, 1, 0, 0, 0, 238, 236, 1, 0, 0, 0, 239, 230, 1, 0, 0, 0, 239, 231, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 241, 1, 0, 0, 0, 241, 242, 5, 50, 0, 0, 242, 21, 1, 0, 0, 0, 243, 244, 5, 14, 0, 0, 244, 245, 3, 24, 12, 0, 245, 23, 1, 0, 0, 0, 246, 251, 3, 26, 13, 0, 247, 248, 5, 34, 0, 0, 248, 250, 3, 26, 13, 0, 249, 247, 1, 0, 0, 0, 250, 253, 1, 0, 0, 0, 251, 249, 1, 0, 0, 0, 251, 252, 1, 0, 0, 0, 252, 25, 1, 0, 0, 0, 253, 251, 1, 0, 0, 0, 254, 260, 3, 10, 5, 0, 255, 256, 3, 42, 21, 0, 256, 257, 5, 33, 0, 0, 257, 258, 3, 10, 5, 0, 258, 260, 1, 0, 0, 0, 259, 254, 1, 0, 0, 0, 259, 255, 1, 0, 0, 0, 260, 27, 1, 0, 0, 0, 261, 262, 5, 6, 0, 0, 262, 267, 3, 40, 20, 0, 263, 264, 5, 34, 0, 0, 264, 266, 3, 40, 20, 0, 265, 263, 1, 0, 0, 0, 266, 269, 1, 0, 0, 0, 267, 265, 1, 0, 0, 0, 267, 268, 1, 0, 0, 0, 268, 271, 1, 0, 0, 0, 269, 267, 1, 0, 0, 0, 270, 272, 3, 30, 15, 0, 271, 270, 1, 0, 0, 0, 271, 272, 1, 0, 0, 0, 272, 29, 1, 0, 0, 0, 273, 274, 5, 63, 0, 0, 274, 275, 5, 70, 0, 0, 275, 280, 3, 40, 20, 0, 276, 277, 5, 34, 0, 0, 277, 279, 3, 40, 20, 0, 278, 276, 1, 0, 0, 0, 279, 282, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 283, 1, 0, 0, 0, 282, 280, 1, 0, 0, 0, 283, 284, 5, 64, 0, 0, 284, 31, 1, 0, 0, 0, 285, 286, 5, 4, 0, 0, 286, 287, 3, 24, 12, 0, 287, 33, 1, 0, 0, 0, 288, 290, 5, 17, 0, 0, 289, 291, 3, 24, 12, 0, 290, 289, 1, 0, 
0, 0, 290, 291, 1, 0, 0, 0, 291, 294, 1, 0, 0, 0, 292, 293, 5, 30, 0, 0, 293, 295, 3, 38, 19, 0, 294, 292, 1, 0, 0, 0, 294, 295, 1, 0, 0, 0, 295, 35, 1, 0, 0, 0, 296, 297, 5, 8, 0, 0, 297, 300, 3, 24, 12, 0, 298, 299, 5, 30, 0, 0, 299, 301, 3, 38, 19, 0, 300, 298, 1, 0, 0, 0, 300, 301, 1, 0, 0, 0, 301, 37, 1, 0, 0, 0, 302, 307, 3, 42, 21, 0, 303, 304, 5, 34, 0, 0, 304, 306, 3, 42, 21, 0, 305, 303, 1, 0, 0, 0, 306, 309, 1, 0, 0, 0, 307, 305, 1, 0, 0, 0, 307, 308, 1, 0, 0, 0, 308, 39, 1, 0, 0, 0, 309, 307, 1, 0, 0, 0, 310, 311, 7, 2, 0, 0, 311, 41, 1, 0, 0, 0, 312, 317, 3, 46, 23, 0, 313, 314, 5, 36, 0, 0, 314, 316, 3, 46, 23, 0, 315, 313, 1, 0, 0, 0, 316, 319, 1, 0, 0, 0, 317, 315, 1, 0, 0, 0, 317, 318, 1, 0, 0, 0, 318, 43, 1, 0, 0, 0, 319, 317, 1, 0, 0, 0, 320, 325, 3, 48, 24, 0, 321, 322, 5, 36, 0, 0, 322, 324, 3, 48, 24, 0, 323, 321, 1, 0, 0, 0, 324, 327, 1, 0, 0, 0, 325, 323, 1, 0, 0, 0, 325, 326, 1, 0, 0, 0, 326, 45, 1, 0, 0, 0, 327, 325, 1, 0, 0, 0, 328, 329, 7, 3, 0, 0, 329, 47, 1, 0, 0, 0, 330, 331, 7, 4, 0, 0, 331, 49, 1, 0, 0, 0, 332, 375, 5, 45, 0, 0, 333, 334, 3, 82, 41, 0, 334, 335, 5, 65, 0, 0, 335, 375, 1, 0, 0, 0, 336, 375, 3, 80, 40, 0, 337, 375, 3, 82, 41, 0, 338, 375, 3, 76, 38, 0, 339, 375, 5, 48, 0, 0, 340, 375, 3, 84, 42, 0, 341, 342, 5, 63, 0, 0, 342, 347, 3, 78, 39, 0, 343, 344, 5, 34, 0, 0, 344, 346, 3, 78, 39, 0, 345, 343, 1, 0, 0, 0, 346, 349, 1, 0, 0, 0, 347, 345, 1, 0, 0, 0, 347, 348, 1, 0, 0, 0, 348, 350, 1, 0, 0, 0, 349, 347, 1, 0, 0, 0, 350, 351, 5, 64, 0, 0, 351, 375, 1, 0, 0, 0, 352, 353, 5, 63, 0, 0, 353, 358, 3, 76, 38, 0, 354, 355, 5, 34, 0, 0, 355, 357, 3, 76, 38, 0, 356, 354, 1, 0, 0, 0, 357, 360, 1, 0, 0, 0, 358, 356, 1, 0, 0, 0, 358, 359, 1, 0, 0, 0, 359, 361, 1, 0, 0, 0, 360, 358, 1, 0, 0, 0, 361, 362, 5, 64, 0, 0, 362, 375, 1, 0, 0, 0, 363, 364, 5, 63, 0, 0, 364, 369, 3, 84, 42, 0, 365, 366, 5, 34, 0, 0, 366, 368, 3, 84, 42, 0, 367, 365, 1, 0, 0, 0, 368, 371, 1, 0, 0, 0, 369, 367, 1, 0, 0, 0, 369, 370, 1, 0, 0, 0, 370, 372, 1, 0, 0, 0, 371, 369, 1, 0, 0, 0, 372, 373, 5, 64, 0, 0, 373, 375, 1, 0, 0, 0, 374, 332, 1, 0, 0, 0, 374, 333, 1, 0, 0, 0, 374, 336, 1, 0, 0, 0, 374, 337, 1, 0, 0, 0, 374, 338, 1, 0, 0, 0, 374, 339, 1, 0, 0, 0, 374, 340, 1, 0, 0, 0, 374, 341, 1, 0, 0, 0, 374, 352, 1, 0, 0, 0, 374, 363, 1, 0, 0, 0, 375, 51, 1, 0, 0, 0, 376, 377, 5, 10, 0, 0, 377, 378, 5, 28, 0, 0, 378, 53, 1, 0, 0, 0, 379, 380, 5, 16, 0, 0, 380, 385, 3, 56, 28, 0, 381, 382, 5, 34, 0, 0, 382, 384, 3, 56, 28, 0, 383, 381, 1, 0, 0, 0, 384, 387, 1, 0, 0, 0, 385, 383, 1, 0, 0, 0, 385, 386, 1, 0, 0, 0, 386, 55, 1, 0, 0, 0, 387, 385, 1, 0, 0, 0, 388, 390, 3, 10, 5, 0, 389, 391, 7, 5, 0, 0, 390, 389, 1, 0, 0, 0, 390, 391, 1, 0, 0, 0, 391, 394, 1, 0, 0, 0, 392, 393, 5, 46, 0, 0, 393, 395, 7, 6, 0, 0, 394, 392, 1, 0, 0, 0, 394, 395, 1, 0, 0, 0, 395, 57, 1, 0, 0, 0, 396, 397, 5, 9, 0, 0, 397, 402, 3, 44, 22, 0, 398, 399, 5, 34, 0, 0, 399, 401, 3, 44, 22, 0, 400, 398, 1, 0, 0, 0, 401, 404, 1, 0, 0, 0, 402, 400, 1, 0, 0, 0, 402, 403, 1, 0, 0, 0, 403, 415, 1, 0, 0, 0, 404, 402, 1, 0, 0, 0, 405, 406, 5, 12, 0, 0, 406, 411, 3, 44, 22, 0, 407, 408, 5, 34, 0, 0, 408, 410, 3, 44, 22, 0, 409, 407, 1, 0, 0, 0, 410, 413, 1, 0, 0, 0, 411, 409, 1, 0, 0, 0, 411, 412, 1, 0, 0, 0, 412, 415, 1, 0, 0, 0, 413, 411, 1, 0, 0, 0, 414, 396, 1, 0, 0, 0, 414, 405, 1, 0, 0, 0, 415, 59, 1, 0, 0, 0, 416, 417, 5, 2, 0, 0, 417, 422, 3, 44, 22, 0, 418, 419, 5, 34, 0, 0, 419, 421, 3, 44, 22, 0, 420, 418, 1, 0, 0, 0, 421, 424, 1, 0, 0, 0, 422, 420, 1, 0, 0, 0, 422, 423, 1, 0, 0, 0, 423, 61, 1, 0, 0, 0, 
424, 422, 1, 0, 0, 0, 425, 426, 5, 13, 0, 0, 426, 431, 3, 64, 32, 0, 427, 428, 5, 34, 0, 0, 428, 430, 3, 64, 32, 0, 429, 427, 1, 0, 0, 0, 430, 433, 1, 0, 0, 0, 431, 429, 1, 0, 0, 0, 431, 432, 1, 0, 0, 0, 432, 63, 1, 0, 0, 0, 433, 431, 1, 0, 0, 0, 434, 435, 3, 44, 22, 0, 435, 436, 5, 79, 0, 0, 436, 437, 3, 44, 22, 0, 437, 65, 1, 0, 0, 0, 438, 439, 5, 1, 0, 0, 439, 440, 3, 18, 9, 0, 440, 442, 3, 84, 42, 0, 441, 443, 3, 72, 36, 0, 442, 441, 1, 0, 0, 0, 442, 443, 1, 0, 0, 0, 443, 67, 1, 0, 0, 0, 444, 445, 5, 7, 0, 0, 445, 446, 3, 18, 9, 0, 446, 447, 3, 84, 42, 0, 447, 69, 1, 0, 0, 0, 448, 449, 5, 11, 0, 0, 449, 450, 3, 42, 21, 0, 450, 71, 1, 0, 0, 0, 451, 456, 3, 74, 37, 0, 452, 453, 5, 34, 0, 0, 453, 455, 3, 74, 37, 0, 454, 452, 1, 0, 0, 0, 455, 458, 1, 0, 0, 0, 456, 454, 1, 0, 0, 0, 456, 457, 1, 0, 0, 0, 457, 73, 1, 0, 0, 0, 458, 456, 1, 0, 0, 0, 459, 460, 3, 46, 23, 0, 460, 461, 5, 33, 0, 0, 461, 462, 3, 50, 25, 0, 462, 75, 1, 0, 0, 0, 463, 464, 7, 7, 0, 0, 464, 77, 1, 0, 0, 0, 465, 468, 3, 80, 40, 0, 466, 468, 3, 82, 41, 0, 467, 465, 1, 0, 0, 0, 467, 466, 1, 0, 0, 0, 468, 79, 1, 0, 0, 0, 469, 471, 7, 0, 0, 0, 470, 469, 1, 0, 0, 0, 470, 471, 1, 0, 0, 0, 471, 472, 1, 0, 0, 0, 472, 473, 5, 29, 0, 0, 473, 81, 1, 0, 0, 0, 474, 476, 7, 0, 0, 0, 475, 474, 1, 0, 0, 0, 475, 476, 1, 0, 0, 0, 476, 477, 1, 0, 0, 0, 477, 478, 5, 28, 0, 0, 478, 83, 1, 0, 0, 0, 479, 480, 5, 27, 0, 0, 480, 85, 1, 0, 0, 0, 481, 482, 7, 8, 0, 0, 482, 87, 1, 0, 0, 0, 483, 484, 5, 5, 0, 0, 484, 485, 3, 90, 45, 0, 485, 89, 1, 0, 0, 0, 486, 487, 5, 63, 0, 0, 487, 488, 3, 2, 1, 0, 488, 489, 5, 64, 0, 0, 489, 91, 1, 0, 0, 0, 490, 491, 5, 15, 0, 0, 491, 495, 5, 95, 0, 0, 492, 493, 5, 15, 0, 0, 493, 495, 5, 96, 0, 0, 494, 490, 1, 0, 0, 0, 494, 492, 1, 0, 0, 0, 495, 93, 1, 0, 0, 0, 496, 500, 5, 3, 0, 0, 497, 499, 3, 98, 49, 0, 498, 497, 1, 0, 0, 0, 499, 502, 1, 0, 0, 0, 500, 498, 1, 0, 0, 0, 500, 501, 1, 0, 0, 0, 501, 503, 1, 0, 0, 0, 502, 500, 1, 0, 0, 0, 503, 506, 5, 85, 0, 0, 504, 505, 5, 83, 0, 0, 505, 507, 3, 44, 22, 0, 506, 504, 1, 0, 0, 0, 506, 507, 1, 0, 0, 0, 507, 517, 1, 0, 0, 0, 508, 509, 5, 84, 0, 0, 509, 514, 3, 96, 48, 0, 510, 511, 5, 34, 0, 0, 511, 513, 3, 96, 48, 0, 512, 510, 1, 0, 0, 0, 513, 516, 1, 0, 0, 0, 514, 512, 1, 0, 0, 0, 514, 515, 1, 0, 0, 0, 515, 518, 1, 0, 0, 0, 516, 514, 1, 0, 0, 0, 517, 508, 1, 0, 0, 0, 517, 518, 1, 0, 0, 0, 518, 95, 1, 0, 0, 0, 519, 520, 3, 44, 22, 0, 520, 521, 5, 33, 0, 0, 521, 523, 1, 0, 0, 0, 522, 519, 1, 0, 0, 0, 522, 523, 1, 0, 0, 0, 523, 524, 1, 0, 0, 0, 524, 525, 3, 44, 22, 0, 525, 97, 1, 0, 0, 0, 526, 527, 5, 63, 0, 0, 527, 528, 5, 101, 0, 0, 528, 529, 5, 100, 0, 0, 529, 530, 5, 101, 0, 0, 530, 531, 5, 64, 0, 0, 531, 99, 1, 0, 0, 0, 53, 111, 118, 133, 145, 154, 162, 166, 174, 176, 181, 188, 193, 200, 206, 214, 216, 226, 236, 239, 251, 259, 267, 271, 280, 290, 294, 300, 307, 317, 325, 347, 358, 369, 374, 385, 390, 394, 402, 411, 414, 422, 431, 442, 456, 467, 470, 475, 494, 500, 506, 514, 517, 522] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index 54ec466de9623..49e72c2ef2b14 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -28,14 +28,15 @@ public class EsqlBaseParser extends Parser { PLUS=58, MINUS=59, ASTERISK=60, SLASH=61, PERCENT=62, OPENING_BRACKET=63, 
CLOSING_BRACKET=64, UNQUOTED_IDENTIFIER=65, QUOTED_IDENTIFIER=66, EXPR_LINE_COMMENT=67, EXPR_MULTILINE_COMMENT=68, EXPR_WS=69, METADATA=70, FROM_UNQUOTED_IDENTIFIER=71, - FROM_LINE_COMMENT=72, FROM_MULTILINE_COMMENT=73, FROM_WS=74, PROJECT_UNQUOTED_IDENTIFIER=75, + FROM_LINE_COMMENT=72, FROM_MULTILINE_COMMENT=73, FROM_WS=74, UNQUOTED_ID_PATTERN=75, PROJECT_LINE_COMMENT=76, PROJECT_MULTILINE_COMMENT=77, PROJECT_WS=78, AS=79, RENAME_LINE_COMMENT=80, RENAME_MULTILINE_COMMENT=81, RENAME_WS=82, - ON=83, WITH=84, ENRICH_LINE_COMMENT=85, ENRICH_MULTILINE_COMMENT=86, ENRICH_WS=87, - ENRICH_FIELD_LINE_COMMENT=88, ENRICH_FIELD_MULTILINE_COMMENT=89, ENRICH_FIELD_WS=90, - MVEXPAND_LINE_COMMENT=91, MVEXPAND_MULTILINE_COMMENT=92, MVEXPAND_WS=93, - INFO=94, FUNCTIONS=95, SHOW_LINE_COMMENT=96, SHOW_MULTILINE_COMMENT=97, - SHOW_WS=98; + ON=83, WITH=84, ENRICH_POLICY_NAME=85, ENRICH_LINE_COMMENT=86, ENRICH_MULTILINE_COMMENT=87, + ENRICH_WS=88, ENRICH_FIELD_LINE_COMMENT=89, ENRICH_FIELD_MULTILINE_COMMENT=90, + ENRICH_FIELD_WS=91, MVEXPAND_LINE_COMMENT=92, MVEXPAND_MULTILINE_COMMENT=93, + MVEXPAND_WS=94, INFO=95, FUNCTIONS=96, SHOW_LINE_COMMENT=97, SHOW_MULTILINE_COMMENT=98, + SHOW_WS=99, COLON=100, SETTING=101, SETTING_LINE_COMMENT=102, SETTTING_MULTILINE_COMMENT=103, + SETTING_WS=104; public static final int RULE_singleStatement = 0, RULE_query = 1, RULE_sourceCommand = 2, RULE_processingCommand = 3, RULE_whereCommand = 4, RULE_booleanExpression = 5, RULE_regexBooleanExpression = 6, @@ -52,7 +53,7 @@ public class EsqlBaseParser extends Parser { RULE_booleanValue = 38, RULE_numericValue = 39, RULE_decimalValue = 40, RULE_integerValue = 41, RULE_string = 42, RULE_comparisonOperator = 43, RULE_explainCommand = 44, RULE_subqueryExpression = 45, RULE_showCommand = 46, - RULE_enrichCommand = 47, RULE_enrichWithClause = 48; + RULE_enrichCommand = 47, RULE_enrichWithClause = 48, RULE_setting = 49; private static String[] makeRuleNames() { return new String[] { "singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand", @@ -65,7 +66,7 @@ private static String[] makeRuleNames() { "dissectCommand", "grokCommand", "mvExpandCommand", "commandOptions", "commandOption", "booleanValue", "numericValue", "decimalValue", "integerValue", "string", "comparisonOperator", "explainCommand", "subqueryExpression", - "showCommand", "enrichCommand", "enrichWithClause" + "showCommand", "enrichCommand", "enrichWithClause", "setting" }; } public static final String[] ruleNames = makeRuleNames(); @@ -82,7 +83,8 @@ private static String[] makeLiteralNames() { "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, "']'", null, null, null, null, null, "'metadata'", null, null, null, null, null, null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, null, - null, null, null, null, null, null, "'info'", "'functions'" + null, null, null, null, null, null, null, "'info'", "'functions'", null, + null, null, "':'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); @@ -99,13 +101,14 @@ private static String[] makeSymbolicNames() { "SLASH", "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "METADATA", "FROM_UNQUOTED_IDENTIFIER", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", - "FROM_WS", "PROJECT_UNQUOTED_IDENTIFIER", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", + "FROM_WS", "UNQUOTED_ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "AS", 
"RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", - "RENAME_WS", "ON", "WITH", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", - "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", - "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", - "MVEXPAND_WS", "INFO", "FUNCTIONS", "SHOW_LINE_COMMENT", "SHOW_MULTILINE_COMMENT", - "SHOW_WS" + "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", + "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", + "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", + "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "FUNCTIONS", "SHOW_LINE_COMMENT", + "SHOW_MULTILINE_COMMENT", "SHOW_WS", "COLON", "SETTING", "SETTING_LINE_COMMENT", + "SETTTING_MULTILINE_COMMENT", "SETTING_WS" }; } private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames(); @@ -192,9 +195,9 @@ public final SingleStatementContext singleStatement() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(98); + setState(100); query(0); - setState(99); + setState(101); match(EOF); } } @@ -290,11 +293,11 @@ private QueryContext query(int _p) throws RecognitionException { _ctx = _localctx; _prevctx = _localctx; - setState(102); + setState(104); sourceCommand(); } _ctx.stop = _input.LT(-1); - setState(109); + setState(111); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -305,16 +308,16 @@ private QueryContext query(int _p) throws RecognitionException { { _localctx = new CompositeQueryContext(new QueryContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_query); - setState(104); + setState(106); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(105); + setState(107); match(PIPE); - setState(106); + setState(108); processingCommand(); } } } - setState(111); + setState(113); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); } @@ -369,34 +372,34 @@ public final SourceCommandContext sourceCommand() throws RecognitionException { SourceCommandContext _localctx = new SourceCommandContext(_ctx, getState()); enterRule(_localctx, 4, RULE_sourceCommand); try { - setState(116); + setState(118); _errHandler.sync(this); switch (_input.LA(1)) { case EXPLAIN: enterOuterAlt(_localctx, 1); { - setState(112); + setState(114); explainCommand(); } break; case FROM: enterOuterAlt(_localctx, 2); { - setState(113); + setState(115); fromCommand(); } break; case ROW: enterOuterAlt(_localctx, 3); { - setState(114); + setState(116); rowCommand(); } break; case SHOW: enterOuterAlt(_localctx, 4); { - setState(115); + setState(117); showCommand(); } break; @@ -480,27 +483,27 @@ public final ProcessingCommandContext processingCommand() throws RecognitionExce ProcessingCommandContext _localctx = new ProcessingCommandContext(_ctx, getState()); enterRule(_localctx, 6, RULE_processingCommand); try { - setState(131); + setState(133); _errHandler.sync(this); switch (_input.LA(1)) { case EVAL: enterOuterAlt(_localctx, 1); { - setState(118); + setState(120); evalCommand(); } break; case INLINESTATS: enterOuterAlt(_localctx, 2); { - setState(119); + setState(121); inlinestatsCommand(); } break; case LIMIT: enterOuterAlt(_localctx, 3); { - setState(120); + setState(122); limitCommand(); } break; @@ -508,70 +511,70 @@ public final ProcessingCommandContext processingCommand() throws 
RecognitionExce case PROJECT: enterOuterAlt(_localctx, 4); { - setState(121); + setState(123); keepCommand(); } break; case SORT: enterOuterAlt(_localctx, 5); { - setState(122); + setState(124); sortCommand(); } break; case STATS: enterOuterAlt(_localctx, 6); { - setState(123); + setState(125); statsCommand(); } break; case WHERE: enterOuterAlt(_localctx, 7); { - setState(124); + setState(126); whereCommand(); } break; case DROP: enterOuterAlt(_localctx, 8); { - setState(125); + setState(127); dropCommand(); } break; case RENAME: enterOuterAlt(_localctx, 9); { - setState(126); + setState(128); renameCommand(); } break; case DISSECT: enterOuterAlt(_localctx, 10); { - setState(127); + setState(129); dissectCommand(); } break; case GROK: enterOuterAlt(_localctx, 11); { - setState(128); + setState(130); grokCommand(); } break; case ENRICH: enterOuterAlt(_localctx, 12); { - setState(129); + setState(131); enrichCommand(); } break; case MV_EXPAND: enterOuterAlt(_localctx, 13); { - setState(130); + setState(132); mvExpandCommand(); } break; @@ -622,9 +625,9 @@ public final WhereCommandContext whereCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(133); + setState(135); match(WHERE); - setState(134); + setState(136); booleanExpression(0); } } @@ -819,7 +822,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(164); + setState(166); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) { case 1: @@ -828,9 +831,9 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(137); + setState(139); match(NOT); - setState(138); + setState(140); booleanExpression(7); } break; @@ -839,7 +842,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new BooleanDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(139); + setState(141); valueExpression(); } break; @@ -848,7 +851,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new RegexExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(140); + setState(142); regexBooleanExpression(); } break; @@ -857,41 +860,41 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalInContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(141); - valueExpression(); setState(143); + valueExpression(); + setState(145); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(142); + setState(144); match(NOT); } } - setState(145); + setState(147); match(IN); - setState(146); + setState(148); match(LP); - setState(147); + setState(149); valueExpression(); - setState(152); + setState(154); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(148); + setState(150); match(COMMA); - setState(149); + setState(151); valueExpression(); } } - setState(154); + setState(156); _errHandler.sync(this); _la = _input.LA(1); } - setState(155); + setState(157); match(RP); } break; @@ -900,27 +903,27 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new IsNullContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(157); + setState(159); valueExpression(); - setState(158); - match(IS); setState(160); + match(IS); + setState(162); _errHandler.sync(this); _la = 
_input.LA(1); if (_la==NOT) { { - setState(159); + setState(161); match(NOT); } } - setState(162); + setState(164); match(NULL); } break; } _ctx.stop = _input.LT(-1); - setState(174); + setState(176); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -928,7 +931,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(172); + setState(174); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) { case 1: @@ -936,11 +939,11 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(166); + setState(168); if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(167); + setState(169); ((LogicalBinaryContext)_localctx).operator = match(AND); - setState(168); + setState(170); ((LogicalBinaryContext)_localctx).right = booleanExpression(5); } break; @@ -949,18 +952,18 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(169); + setState(171); if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); - setState(170); + setState(172); ((LogicalBinaryContext)_localctx).operator = match(OR); - setState(171); + setState(173); ((LogicalBinaryContext)_localctx).right = booleanExpression(4); } break; } } } - setState(176); + setState(178); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); } @@ -1015,48 +1018,48 @@ public final RegexBooleanExpressionContext regexBooleanExpression() throws Recog enterRule(_localctx, 12, RULE_regexBooleanExpression); int _la; try { - setState(191); + setState(193); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(177); - valueExpression(); setState(179); + valueExpression(); + setState(181); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(178); + setState(180); match(NOT); } } - setState(181); + setState(183); ((RegexBooleanExpressionContext)_localctx).kind = match(LIKE); - setState(182); + setState(184); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(184); - valueExpression(); setState(186); + valueExpression(); + setState(188); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(185); + setState(187); match(NOT); } } - setState(188); + setState(190); ((RegexBooleanExpressionContext)_localctx).kind = match(RLIKE); - setState(189); + setState(191); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; @@ -1142,14 +1145,14 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio ValueExpressionContext _localctx = new ValueExpressionContext(_ctx, getState()); enterRule(_localctx, 14, RULE_valueExpression); try { - setState(198); + setState(200); _errHandler.sync(this); 
switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) { case 1: _localctx = new ValueExpressionDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(193); + setState(195); operatorExpression(0); } break; @@ -1157,11 +1160,11 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio _localctx = new ComparisonContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(194); + setState(196); ((ComparisonContext)_localctx).left = operatorExpression(0); - setState(195); + setState(197); comparisonOperator(); - setState(196); + setState(198); ((ComparisonContext)_localctx).right = operatorExpression(0); } break; @@ -1286,7 +1289,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE int _alt; enterOuterAlt(_localctx, 1); { - setState(204); + setState(206); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) { case 1: @@ -1295,7 +1298,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _ctx = _localctx; _prevctx = _localctx; - setState(201); + setState(203); primaryExpression(); } break; @@ -1304,7 +1307,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(202); + setState(204); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1315,13 +1318,13 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(203); + setState(205); operatorExpression(3); } break; } _ctx.stop = _input.LT(-1); - setState(214); + setState(216); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1329,7 +1332,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(212); + setState(214); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: @@ -1337,9 +1340,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(206); + setState(208); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(207); + setState(209); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(((_la) & ~0x3f) == 0 && ((1L << _la) & 8070450532247928832L) != 0) ) { @@ -1350,7 +1353,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(208); + setState(210); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(3); } break; @@ -1359,9 +1362,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(209); + setState(211); if (!(precpred(_ctx, 1))) throw new 
FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(210); + setState(212); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1372,14 +1375,14 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(211); + setState(213); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(2); } break; } } } - setState(216); + setState(218); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); } @@ -1501,14 +1504,14 @@ public final PrimaryExpressionContext primaryExpression() throws RecognitionExce PrimaryExpressionContext _localctx = new PrimaryExpressionContext(_ctx, getState()); enterRule(_localctx, 18, RULE_primaryExpression); try { - setState(224); + setState(226); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,16,_ctx) ) { case 1: _localctx = new ConstantDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(217); + setState(219); constant(); } break; @@ -1516,7 +1519,7 @@ public final PrimaryExpressionContext primaryExpression() throws RecognitionExce _localctx = new DereferenceContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(218); + setState(220); qualifiedName(); } break; @@ -1524,7 +1527,7 @@ public final PrimaryExpressionContext primaryExpression() throws RecognitionExce _localctx = new FunctionContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(219); + setState(221); functionExpression(); } break; @@ -1532,11 +1535,11 @@ public final PrimaryExpressionContext primaryExpression() throws RecognitionExce _localctx = new ParenthesizedExpressionContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(220); + setState(222); match(LP); - setState(221); + setState(223); booleanExpression(0); - setState(222); + setState(224); match(RP); } break; @@ -1598,16 +1601,16 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(226); + setState(228); identifier(); - setState(227); + setState(229); match(LP); - setState(237); + setState(239); _errHandler.sync(this); switch (_input.LA(1)) { case ASTERISK: { - setState(228); + setState(230); match(ASTERISK); } break; @@ -1627,21 +1630,21 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx case QUOTED_IDENTIFIER: { { - setState(229); + setState(231); booleanExpression(0); - setState(234); + setState(236); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(230); + setState(232); match(COMMA); - setState(231); + setState(233); booleanExpression(0); } } - setState(236); + setState(238); _errHandler.sync(this); _la = _input.LA(1); } @@ -1653,7 +1656,7 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx default: break; } - setState(239); + setState(241); match(RP); } } @@ -1700,9 +1703,9 @@ public final RowCommandContext rowCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(241); + setState(243); match(ROW); - setState(242); + setState(244); fields(); } } @@ -1756,23 +1759,23 @@ public final FieldsContext fields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(244); + setState(246); field(); - setState(249); + setState(251); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,19,_ctx); while ( _alt!=2 && 
_alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(245); + setState(247); match(COMMA); - setState(246); + setState(248); field(); } } } - setState(251); + setState(253); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,19,_ctx); } @@ -1822,24 +1825,24 @@ public final FieldContext field() throws RecognitionException { FieldContext _localctx = new FieldContext(_ctx, getState()); enterRule(_localctx, 26, RULE_field); try { - setState(257); + setState(259); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,20,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(252); + setState(254); booleanExpression(0); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(253); + setState(255); qualifiedName(); - setState(254); + setState(256); match(ASSIGN); - setState(255); + setState(257); booleanExpression(0); } break; @@ -1899,34 +1902,34 @@ public final FromCommandContext fromCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(259); + setState(261); match(FROM); - setState(260); + setState(262); fromIdentifier(); - setState(265); + setState(267); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,21,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(261); + setState(263); match(COMMA); - setState(262); + setState(264); fromIdentifier(); } } } - setState(267); + setState(269); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,21,_ctx); } - setState(269); + setState(271); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,22,_ctx) ) { case 1: { - setState(268); + setState(270); metadata(); } break; @@ -1986,29 +1989,29 @@ public final MetadataContext metadata() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(271); + setState(273); match(OPENING_BRACKET); - setState(272); + setState(274); match(METADATA); - setState(273); + setState(275); fromIdentifier(); - setState(278); + setState(280); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(274); + setState(276); match(COMMA); - setState(275); + setState(277); fromIdentifier(); } } - setState(280); + setState(282); _errHandler.sync(this); _la = _input.LA(1); } - setState(281); + setState(283); match(CLOSING_BRACKET); } } @@ -2055,9 +2058,9 @@ public final EvalCommandContext evalCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(283); + setState(285); match(EVAL); - setState(284); + setState(286); fields(); } } @@ -2108,26 +2111,26 @@ public final StatsCommandContext statsCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(286); - match(STATS); setState(288); + match(STATS); + setState(290); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { case 1: { - setState(287); + setState(289); fields(); } break; } - setState(292); + setState(294); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,25,_ctx) ) { case 1: { - setState(290); + setState(292); match(BY); - setState(291); + setState(293); grouping(); } break; @@ -2181,18 +2184,18 @@ public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(294); + setState(296); match(INLINESTATS); - setState(295); + setState(297); fields(); - setState(298); + setState(300); _errHandler.sync(this); switch ( 
getInterpreter().adaptivePredict(_input,26,_ctx) ) { case 1: { - setState(296); + setState(298); match(BY); - setState(297); + setState(299); grouping(); } break; @@ -2249,23 +2252,23 @@ public final GroupingContext grouping() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(300); + setState(302); qualifiedName(); - setState(305); + setState(307); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,27,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(301); + setState(303); match(COMMA); - setState(302); + setState(304); qualifiedName(); } } } - setState(307); + setState(309); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,27,_ctx); } @@ -2313,7 +2316,7 @@ public final FromIdentifierContext fromIdentifier() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(308); + setState(310); _la = _input.LA(1); if ( !(_la==QUOTED_IDENTIFIER || _la==FROM_UNQUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -2375,23 +2378,23 @@ public final QualifiedNameContext qualifiedName() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(310); + setState(312); identifier(); - setState(315); + setState(317); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,28,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(311); + setState(313); match(DOT); - setState(312); + setState(314); identifier(); } } } - setState(317); + setState(319); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,28,_ctx); } @@ -2447,23 +2450,23 @@ public final QualifiedNamePatternContext qualifiedNamePattern() throws Recogniti int _alt; enterOuterAlt(_localctx, 1); { - setState(318); + setState(320); identifierPattern(); - setState(323); + setState(325); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,29,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(319); + setState(321); match(DOT); - setState(320); + setState(322); identifierPattern(); } } } - setState(325); + setState(327); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,29,_ctx); } @@ -2511,7 +2514,7 @@ public final IdentifierContext identifier() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(326); + setState(328); _la = _input.LA(1); if ( !(_la==UNQUOTED_IDENTIFIER || _la==QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -2536,7 +2539,7 @@ public final IdentifierContext identifier() throws RecognitionException { @SuppressWarnings("CheckReturnValue") public static class IdentifierPatternContext extends ParserRuleContext { - public TerminalNode PROJECT_UNQUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.PROJECT_UNQUOTED_IDENTIFIER, 0); } + public TerminalNode UNQUOTED_ID_PATTERN() { return getToken(EsqlBaseParser.UNQUOTED_ID_PATTERN, 0); } public TerminalNode QUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.QUOTED_IDENTIFIER, 0); } @SuppressWarnings("this-escape") public IdentifierPatternContext(ParserRuleContext parent, int invokingState) { @@ -2565,9 +2568,9 @@ public final IdentifierPatternContext identifierPattern() throws RecognitionExce try { enterOuterAlt(_localctx, 1); { - setState(328); + setState(330); _la = _input.LA(1); - if ( !(_la==QUOTED_IDENTIFIER || _la==PROJECT_UNQUOTED_IDENTIFIER) ) { + if ( 
!(_la==QUOTED_IDENTIFIER || _la==UNQUOTED_ID_PATTERN) ) { _errHandler.recoverInline(this); } else { @@ -2842,14 +2845,14 @@ public final ConstantContext constant() throws RecognitionException { enterRule(_localctx, 50, RULE_constant); int _la; try { - setState(372); + setState(374); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,33,_ctx) ) { case 1: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(330); + setState(332); match(NULL); } break; @@ -2857,9 +2860,9 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new QualifiedIntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(331); + setState(333); integerValue(); - setState(332); + setState(334); match(UNQUOTED_IDENTIFIER); } break; @@ -2867,7 +2870,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(334); + setState(336); decimalValue(); } break; @@ -2875,7 +2878,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(335); + setState(337); integerValue(); } break; @@ -2883,7 +2886,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(336); + setState(338); booleanValue(); } break; @@ -2891,7 +2894,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new InputParamContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(337); + setState(339); match(PARAM); } break; @@ -2899,7 +2902,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(338); + setState(340); string(); } break; @@ -2907,27 +2910,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new NumericArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(339); + setState(341); match(OPENING_BRACKET); - setState(340); + setState(342); numericValue(); - setState(345); + setState(347); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(341); + setState(343); match(COMMA); - setState(342); + setState(344); numericValue(); } } - setState(347); + setState(349); _errHandler.sync(this); _la = _input.LA(1); } - setState(348); + setState(350); match(CLOSING_BRACKET); } break; @@ -2935,27 +2938,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(350); + setState(352); match(OPENING_BRACKET); - setState(351); + setState(353); booleanValue(); - setState(356); + setState(358); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(352); + setState(354); match(COMMA); - setState(353); + setState(355); booleanValue(); } } - setState(358); + setState(360); _errHandler.sync(this); _la = _input.LA(1); } - setState(359); + setState(361); match(CLOSING_BRACKET); } break; @@ -2963,27 +2966,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(361); + setState(363); match(OPENING_BRACKET); - setState(362); + setState(364); string(); - setState(367); + 
setState(369); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(363); + setState(365); match(COMMA); - setState(364); + setState(366); string(); } } - setState(369); + setState(371); _errHandler.sync(this); _la = _input.LA(1); } - setState(370); + setState(372); match(CLOSING_BRACKET); } break; @@ -3030,9 +3033,9 @@ public final LimitCommandContext limitCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(374); + setState(376); match(LIMIT); - setState(375); + setState(377); match(INTEGER_LITERAL); } } @@ -3087,25 +3090,25 @@ public final SortCommandContext sortCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(377); + setState(379); match(SORT); - setState(378); + setState(380); orderExpression(); - setState(383); + setState(385); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,34,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(379); + setState(381); match(COMMA); - setState(380); + setState(382); orderExpression(); } } } - setState(385); + setState(387); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,34,_ctx); } @@ -3161,14 +3164,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(386); - booleanExpression(0); setState(388); + booleanExpression(0); + setState(390); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,35,_ctx) ) { case 1: { - setState(387); + setState(389); ((OrderExpressionContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -3182,14 +3185,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio } break; } - setState(392); + setState(394); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,36,_ctx) ) { case 1: { - setState(390); + setState(392); match(NULLS); - setState(391); + setState(393); ((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -3255,31 +3258,31 @@ public final KeepCommandContext keepCommand() throws RecognitionException { enterRule(_localctx, 58, RULE_keepCommand); try { int _alt; - setState(412); + setState(414); _errHandler.sync(this); switch (_input.LA(1)) { case KEEP: enterOuterAlt(_localctx, 1); { - setState(394); + setState(396); match(KEEP); - setState(395); + setState(397); qualifiedNamePattern(); - setState(400); + setState(402); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,37,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(396); + setState(398); match(COMMA); - setState(397); + setState(399); qualifiedNamePattern(); } } } - setState(402); + setState(404); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,37,_ctx); } @@ -3288,25 +3291,25 @@ public final KeepCommandContext keepCommand() throws RecognitionException { case PROJECT: enterOuterAlt(_localctx, 2); { - setState(403); + setState(405); match(PROJECT); - setState(404); + setState(406); qualifiedNamePattern(); - setState(409); + setState(411); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,38,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(405); + setState(407); match(COMMA); - setState(406); + 
setState(408); qualifiedNamePattern(); } } } - setState(411); + setState(413); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,38,_ctx); } @@ -3367,25 +3370,25 @@ public final DropCommandContext dropCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(414); + setState(416); match(DROP); - setState(415); + setState(417); qualifiedNamePattern(); - setState(420); + setState(422); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,40,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(416); + setState(418); match(COMMA); - setState(417); + setState(419); qualifiedNamePattern(); } } } - setState(422); + setState(424); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,40,_ctx); } @@ -3442,25 +3445,25 @@ public final RenameCommandContext renameCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(423); + setState(425); match(RENAME); - setState(424); + setState(426); renameClause(); - setState(429); + setState(431); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,41,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(425); + setState(427); match(COMMA); - setState(426); + setState(428); renameClause(); } } } - setState(431); + setState(433); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,41,_ctx); } @@ -3514,11 +3517,11 @@ public final RenameClauseContext renameClause() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(432); + setState(434); ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); - setState(433); + setState(435); match(AS); - setState(434); + setState(436); ((RenameClauseContext)_localctx).newName = qualifiedNamePattern(); } } @@ -3571,18 +3574,18 @@ public final DissectCommandContext dissectCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(436); + setState(438); match(DISSECT); - setState(437); + setState(439); primaryExpression(); - setState(438); - string(); setState(440); + string(); + setState(442); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,42,_ctx) ) { case 1: { - setState(439); + setState(441); commandOptions(); } break; @@ -3635,11 +3638,11 @@ public final GrokCommandContext grokCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(442); + setState(444); match(GROK); - setState(443); + setState(445); primaryExpression(); - setState(444); + setState(446); string(); } } @@ -3686,9 +3689,9 @@ public final MvExpandCommandContext mvExpandCommand() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(446); + setState(448); match(MV_EXPAND); - setState(447); + setState(449); qualifiedName(); } } @@ -3742,23 +3745,23 @@ public final CommandOptionsContext commandOptions() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(449); + setState(451); commandOption(); - setState(454); + setState(456); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,43,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(450); + setState(452); match(COMMA); - setState(451); + setState(453); commandOption(); } } } - setState(456); + setState(458); _errHandler.sync(this); _alt = 
getInterpreter().adaptivePredict(_input,43,_ctx); } @@ -3810,11 +3813,11 @@ public final CommandOptionContext commandOption() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(457); + setState(459); identifier(); - setState(458); + setState(460); match(ASSIGN); - setState(459); + setState(461); constant(); } } @@ -3860,7 +3863,7 @@ public final BooleanValueContext booleanValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(461); + setState(463); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -3915,20 +3918,20 @@ public final NumericValueContext numericValue() throws RecognitionException { NumericValueContext _localctx = new NumericValueContext(_ctx, getState()); enterRule(_localctx, 78, RULE_numericValue); try { - setState(465); + setState(467); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,44,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(463); + setState(465); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(464); + setState(466); integerValue(); } break; @@ -3977,12 +3980,12 @@ public final DecimalValueContext decimalValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(468); + setState(470); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(467); + setState(469); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -3995,7 +3998,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException { } } - setState(470); + setState(472); match(DECIMAL_LITERAL); } } @@ -4042,12 +4045,12 @@ public final IntegerValueContext integerValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(473); + setState(475); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(472); + setState(474); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4060,7 +4063,7 @@ public final IntegerValueContext integerValue() throws RecognitionException { } } - setState(475); + setState(477); match(INTEGER_LITERAL); } } @@ -4104,7 +4107,7 @@ public final StringContext string() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(477); + setState(479); match(STRING); } } @@ -4154,7 +4157,7 @@ public final ComparisonOperatorContext comparisonOperator() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(479); + setState(481); _la = _input.LA(1); if ( !(((_la) & ~0x3f) == 0 && ((1L << _la) & 283726776524341248L) != 0) ) { _errHandler.recoverInline(this); @@ -4209,9 +4212,9 @@ public final ExplainCommandContext explainCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(481); + setState(483); match(EXPLAIN); - setState(482); + setState(484); subqueryExpression(); } } @@ -4259,11 +4262,11 @@ public final SubqueryExpressionContext subqueryExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(484); + setState(486); match(OPENING_BRACKET); - setState(485); + setState(487); query(0); - setState(486); + setState(488); match(CLOSING_BRACKET); } } @@ -4337,16 +4340,16 @@ public final ShowCommandContext showCommand() throws RecognitionException { ShowCommandContext _localctx = new ShowCommandContext(_ctx, getState()); enterRule(_localctx, 92, RULE_showCommand); try { - setState(492); + setState(494); _errHandler.sync(this); switch ( 
getInterpreter().adaptivePredict(_input,47,_ctx) ) { case 1: _localctx = new ShowInfoContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(488); + setState(490); match(SHOW); - setState(489); + setState(491); match(INFO); } break; @@ -4354,9 +4357,9 @@ public final ShowCommandContext showCommand() throws RecognitionException { _localctx = new ShowFunctionsContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(490); + setState(492); match(SHOW); - setState(491); + setState(493); match(FUNCTIONS); } break; @@ -4375,11 +4378,15 @@ public final ShowCommandContext showCommand() throws RecognitionException { @SuppressWarnings("CheckReturnValue") public static class EnrichCommandContext extends ParserRuleContext { - public FromIdentifierContext policyName; + public Token policyName; public QualifiedNamePatternContext matchField; public TerminalNode ENRICH() { return getToken(EsqlBaseParser.ENRICH, 0); } - public FromIdentifierContext fromIdentifier() { - return getRuleContext(FromIdentifierContext.class,0); + public TerminalNode ENRICH_POLICY_NAME() { return getToken(EsqlBaseParser.ENRICH_POLICY_NAME, 0); } + public List setting() { + return getRuleContexts(SettingContext.class); + } + public SettingContext setting(int i) { + return getRuleContext(SettingContext.class,i); } public TerminalNode ON() { return getToken(EsqlBaseParser.ON, 0); } public TerminalNode WITH() { return getToken(EsqlBaseParser.WITH, 0); } @@ -4419,52 +4426,67 @@ public T accept(ParseTreeVisitor visitor) { public final EnrichCommandContext enrichCommand() throws RecognitionException { EnrichCommandContext _localctx = new EnrichCommandContext(_ctx, getState()); enterRule(_localctx, 94, RULE_enrichCommand); + int _la; try { int _alt; enterOuterAlt(_localctx, 1); { - setState(494); + setState(496); match(ENRICH); - setState(495); - ((EnrichCommandContext)_localctx).policyName = fromIdentifier(); - setState(498); + setState(500); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,48,_ctx) ) { + _la = _input.LA(1); + while (_la==OPENING_BRACKET) { + { + { + setState(497); + setting(); + } + } + setState(502); + _errHandler.sync(this); + _la = _input.LA(1); + } + setState(503); + ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); + setState(506); + _errHandler.sync(this); + switch ( getInterpreter().adaptivePredict(_input,49,_ctx) ) { case 1: { - setState(496); + setState(504); match(ON); - setState(497); + setState(505); ((EnrichCommandContext)_localctx).matchField = qualifiedNamePattern(); } break; } - setState(509); + setState(517); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,50,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,51,_ctx) ) { case 1: { - setState(500); + setState(508); match(WITH); - setState(501); + setState(509); enrichWithClause(); - setState(506); + setState(514); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,49,_ctx); + _alt = getInterpreter().adaptivePredict(_input,50,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(502); + setState(510); match(COMMA); - setState(503); + setState(511); enrichWithClause(); } } } - setState(508); + setState(516); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,49,_ctx); + _alt = getInterpreter().adaptivePredict(_input,50,_ctx); } } break; @@ -4519,19 +4541,19 @@ public final EnrichWithClauseContext enrichWithClause() throws RecognitionExcept try { 
enterOuterAlt(_localctx, 1); { - setState(514); + setState(522); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,51,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,52,_ctx) ) { case 1: { - setState(511); + setState(519); ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); - setState(512); + setState(520); match(ASSIGN); } break; } - setState(516); + setState(524); ((EnrichWithClauseContext)_localctx).enrichField = qualifiedNamePattern(); } } @@ -4546,6 +4568,66 @@ public final EnrichWithClauseContext enrichWithClause() throws RecognitionExcept return _localctx; } + @SuppressWarnings("CheckReturnValue") + public static class SettingContext extends ParserRuleContext { + public Token name; + public Token value; + public TerminalNode OPENING_BRACKET() { return getToken(EsqlBaseParser.OPENING_BRACKET, 0); } + public TerminalNode COLON() { return getToken(EsqlBaseParser.COLON, 0); } + public TerminalNode CLOSING_BRACKET() { return getToken(EsqlBaseParser.CLOSING_BRACKET, 0); } + public List SETTING() { return getTokens(EsqlBaseParser.SETTING); } + public TerminalNode SETTING(int i) { + return getToken(EsqlBaseParser.SETTING, i); + } + @SuppressWarnings("this-escape") + public SettingContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_setting; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterSetting(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitSetting(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitSetting(this); + else return visitor.visitChildren(this); + } + } + + public final SettingContext setting() throws RecognitionException { + SettingContext _localctx = new SettingContext(_ctx, getState()); + enterRule(_localctx, 98, RULE_setting); + try { + enterOuterAlt(_localctx, 1); + { + setState(526); + match(OPENING_BRACKET); + setState(527); + ((SettingContext)_localctx).name = match(SETTING); + setState(528); + match(COLON); + setState(529); + ((SettingContext)_localctx).value = match(SETTING); + setState(530); + match(CLOSING_BRACKET); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { switch (ruleIndex) { case 1: @@ -4584,7 +4666,7 @@ private boolean operatorExpression_sempred(OperatorExpressionContext _localctx, } public static final String _serializedATN = - "\u0004\u0001b\u0207\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ + "\u0004\u0001h\u0215\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ @@ -4597,327 +4679,335 @@ private boolean operatorExpression_sempred(OperatorExpressionContext _localctx, "\u0002\u001f\u0007\u001f\u0002 \u0007 \u0002!\u0007!\u0002\"\u0007\"\u0002"+ 
"#\u0007#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002"+ "(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002"+ - "-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u0001\u0000\u0001\u0000"+ - "\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0005\u0001l\b\u0001\n\u0001\f\u0001o\t\u0001\u0001\u0002"+ - "\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002u\b\u0002\u0001\u0003"+ - "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ - "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ - "\u0003\u0003\u0084\b\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0003\u0005\u0090\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0005\u0005\u0097\b\u0005\n\u0005\f\u0005\u009a\t\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00a1"+ - "\b\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00a5\b\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005"+ - "\u00ad\b\u0005\n\u0005\f\u0005\u00b0\t\u0005\u0001\u0006\u0001\u0006\u0003"+ - "\u0006\u00b4\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ - "\u0006\u0003\u0006\u00bb\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003"+ - "\u0006\u00c0\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ - "\u0007\u0003\u0007\u00c7\b\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0003"+ - "\b\u00cd\b\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0005\b\u00d5"+ - "\b\b\n\b\f\b\u00d8\t\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t"+ - "\u0001\t\u0003\t\u00e1\b\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ - "\n\u0005\n\u00e9\b\n\n\n\f\n\u00ec\t\n\u0003\n\u00ee\b\n\u0001\n\u0001"+ - "\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0005\f"+ - "\u00f8\b\f\n\f\f\f\u00fb\t\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0003"+ - "\r\u0102\b\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0005\u000e"+ - "\u0108\b\u000e\n\u000e\f\u000e\u010b\t\u000e\u0001\u000e\u0003\u000e\u010e"+ - "\b\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0005"+ - "\u000f\u0115\b\u000f\n\u000f\f\u000f\u0118\t\u000f\u0001\u000f\u0001\u000f"+ - "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0003\u0011"+ - "\u0121\b\u0011\u0001\u0011\u0001\u0011\u0003\u0011\u0125\b\u0011\u0001"+ - "\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0003\u0012\u012b\b\u0012\u0001"+ - "\u0013\u0001\u0013\u0001\u0013\u0005\u0013\u0130\b\u0013\n\u0013\f\u0013"+ - "\u0133\t\u0013\u0001\u0014\u0001\u0014\u0001\u0015\u0001\u0015\u0001\u0015"+ - "\u0005\u0015\u013a\b\u0015\n\u0015\f\u0015\u013d\t\u0015\u0001\u0016\u0001"+ - "\u0016\u0001\u0016\u0005\u0016\u0142\b\u0016\n\u0016\f\u0016\u0145\t\u0016"+ - "\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019"+ + "-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u0001"+ + "\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0005\u0001n\b\u0001\n\u0001\f\u0001q\t"+ + "\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002w\b"+ + "\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ + "\u0003\u0001\u0003\u0003\u0003\u0086\b\u0003\u0001\u0004\u0001\u0004\u0001"+ + 
"\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0003\u0005\u0092\b\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u0099\b\u0005\n\u0005\f\u0005"+ + "\u009c\t\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0003\u0005\u00a3\b\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00a7\b"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0005\u0005\u00af\b\u0005\n\u0005\f\u0005\u00b2\t\u0005\u0001\u0006"+ + "\u0001\u0006\u0003\u0006\u00b6\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006"+ + "\u0001\u0006\u0001\u0006\u0003\u0006\u00bd\b\u0006\u0001\u0006\u0001\u0006"+ + "\u0001\u0006\u0003\u0006\u00c2\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007"+ + "\u0001\u0007\u0001\u0007\u0003\u0007\u00c9\b\u0007\u0001\b\u0001\b\u0001"+ + "\b\u0001\b\u0003\b\u00cf\b\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001"+ + "\b\u0005\b\u00d7\b\b\n\b\f\b\u00da\t\b\u0001\t\u0001\t\u0001\t\u0001\t"+ + "\u0001\t\u0001\t\u0001\t\u0003\t\u00e3\b\t\u0001\n\u0001\n\u0001\n\u0001"+ + "\n\u0001\n\u0001\n\u0005\n\u00eb\b\n\n\n\f\n\u00ee\t\n\u0003\n\u00f0\b"+ + "\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f"+ + "\u0001\f\u0005\f\u00fa\b\f\n\f\f\f\u00fd\t\f\u0001\r\u0001\r\u0001\r\u0001"+ + "\r\u0001\r\u0003\r\u0104\b\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ + "\u000e\u0005\u000e\u010a\b\u000e\n\u000e\f\u000e\u010d\t\u000e\u0001\u000e"+ + "\u0003\u000e\u0110\b\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f"+ + "\u0001\u000f\u0005\u000f\u0117\b\u000f\n\u000f\f\u000f\u011a\t\u000f\u0001"+ + "\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001"+ + "\u0011\u0003\u0011\u0123\b\u0011\u0001\u0011\u0001\u0011\u0003\u0011\u0127"+ + "\b\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0003\u0012\u012d"+ + "\b\u0012\u0001\u0013\u0001\u0013\u0001\u0013\u0005\u0013\u0132\b\u0013"+ + "\n\u0013\f\u0013\u0135\t\u0013\u0001\u0014\u0001\u0014\u0001\u0015\u0001"+ + "\u0015\u0001\u0015\u0005\u0015\u013c\b\u0015\n\u0015\f\u0015\u013f\t\u0015"+ + "\u0001\u0016\u0001\u0016\u0001\u0016\u0005\u0016\u0144\b\u0016\n\u0016"+ + "\f\u0016\u0147\t\u0016\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018"+ + "\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019"+ "\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019"+ - "\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0005\u0019"+ - "\u0158\b\u0019\n\u0019\f\u0019\u015b\t\u0019\u0001\u0019\u0001\u0019\u0001"+ - "\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0005\u0019\u0163\b\u0019\n"+ - "\u0019\f\u0019\u0166\t\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001"+ - "\u0019\u0001\u0019\u0001\u0019\u0005\u0019\u016e\b\u0019\n\u0019\f\u0019"+ - "\u0171\t\u0019\u0001\u0019\u0001\u0019\u0003\u0019\u0175\b\u0019\u0001"+ - "\u001a\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0001"+ - "\u001b\u0005\u001b\u017e\b\u001b\n\u001b\f\u001b\u0181\t\u001b\u0001\u001c"+ - "\u0001\u001c\u0003\u001c\u0185\b\u001c\u0001\u001c\u0001\u001c\u0003\u001c"+ - "\u0189\b\u001c\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0005\u001d"+ - "\u018f\b\u001d\n\u001d\f\u001d\u0192\t\u001d\u0001\u001d\u0001\u001d\u0001"+ - "\u001d\u0001\u001d\u0005\u001d\u0198\b\u001d\n\u001d\f\u001d\u019b\t\u001d"+ - "\u0003\u001d\u019d\b\u001d\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001e"+ - "\u0005\u001e\u01a3\b\u001e\n\u001e\f\u001e\u01a6\t\u001e\u0001\u001f\u0001"+ - 
"\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u01ac\b\u001f\n\u001f\f\u001f"+ - "\u01af\t\u001f\u0001 \u0001 \u0001 \u0001 \u0001!\u0001!\u0001!\u0001"+ - "!\u0003!\u01b9\b!\u0001\"\u0001\"\u0001\"\u0001\"\u0001#\u0001#\u0001"+ - "#\u0001$\u0001$\u0001$\u0005$\u01c5\b$\n$\f$\u01c8\t$\u0001%\u0001%\u0001"+ - "%\u0001%\u0001&\u0001&\u0001\'\u0001\'\u0003\'\u01d2\b\'\u0001(\u0003"+ - "(\u01d5\b(\u0001(\u0001(\u0001)\u0003)\u01da\b)\u0001)\u0001)\u0001*\u0001"+ - "*\u0001+\u0001+\u0001,\u0001,\u0001,\u0001-\u0001-\u0001-\u0001-\u0001"+ - ".\u0001.\u0001.\u0001.\u0003.\u01ed\b.\u0001/\u0001/\u0001/\u0001/\u0003"+ - "/\u01f3\b/\u0001/\u0001/\u0001/\u0001/\u0005/\u01f9\b/\n/\f/\u01fc\t/"+ - "\u0003/\u01fe\b/\u00010\u00010\u00010\u00030\u0203\b0\u00010\u00010\u0001"+ - "0\u0000\u0003\u0002\n\u00101\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010"+ - "\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPR"+ - "TVXZ\\^`\u0000\t\u0001\u0000:;\u0001\u0000<>\u0002\u0000BBGG\u0001\u0000"+ - "AB\u0002\u0000BBKK\u0002\u0000 ##\u0001\u0000&\'\u0002\u0000%%33\u0001"+ - "\u000049\u0224\u0000b\u0001\u0000\u0000\u0000\u0002e\u0001\u0000\u0000"+ - "\u0000\u0004t\u0001\u0000\u0000\u0000\u0006\u0083\u0001\u0000\u0000\u0000"+ - "\b\u0085\u0001\u0000\u0000\u0000\n\u00a4\u0001\u0000\u0000\u0000\f\u00bf"+ - "\u0001\u0000\u0000\u0000\u000e\u00c6\u0001\u0000\u0000\u0000\u0010\u00cc"+ - "\u0001\u0000\u0000\u0000\u0012\u00e0\u0001\u0000\u0000\u0000\u0014\u00e2"+ - "\u0001\u0000\u0000\u0000\u0016\u00f1\u0001\u0000\u0000\u0000\u0018\u00f4"+ - "\u0001\u0000\u0000\u0000\u001a\u0101\u0001\u0000\u0000\u0000\u001c\u0103"+ - "\u0001\u0000\u0000\u0000\u001e\u010f\u0001\u0000\u0000\u0000 \u011b\u0001"+ - "\u0000\u0000\u0000\"\u011e\u0001\u0000\u0000\u0000$\u0126\u0001\u0000"+ - "\u0000\u0000&\u012c\u0001\u0000\u0000\u0000(\u0134\u0001\u0000\u0000\u0000"+ - "*\u0136\u0001\u0000\u0000\u0000,\u013e\u0001\u0000\u0000\u0000.\u0146"+ - "\u0001\u0000\u0000\u00000\u0148\u0001\u0000\u0000\u00002\u0174\u0001\u0000"+ - "\u0000\u00004\u0176\u0001\u0000\u0000\u00006\u0179\u0001\u0000\u0000\u0000"+ - "8\u0182\u0001\u0000\u0000\u0000:\u019c\u0001\u0000\u0000\u0000<\u019e"+ - "\u0001\u0000\u0000\u0000>\u01a7\u0001\u0000\u0000\u0000@\u01b0\u0001\u0000"+ - "\u0000\u0000B\u01b4\u0001\u0000\u0000\u0000D\u01ba\u0001\u0000\u0000\u0000"+ - "F\u01be\u0001\u0000\u0000\u0000H\u01c1\u0001\u0000\u0000\u0000J\u01c9"+ - "\u0001\u0000\u0000\u0000L\u01cd\u0001\u0000\u0000\u0000N\u01d1\u0001\u0000"+ - "\u0000\u0000P\u01d4\u0001\u0000\u0000\u0000R\u01d9\u0001\u0000\u0000\u0000"+ - "T\u01dd\u0001\u0000\u0000\u0000V\u01df\u0001\u0000\u0000\u0000X\u01e1"+ - "\u0001\u0000\u0000\u0000Z\u01e4\u0001\u0000\u0000\u0000\\\u01ec\u0001"+ - "\u0000\u0000\u0000^\u01ee\u0001\u0000\u0000\u0000`\u0202\u0001\u0000\u0000"+ - "\u0000bc\u0003\u0002\u0001\u0000cd\u0005\u0000\u0000\u0001d\u0001\u0001"+ - "\u0000\u0000\u0000ef\u0006\u0001\uffff\uffff\u0000fg\u0003\u0004\u0002"+ - "\u0000gm\u0001\u0000\u0000\u0000hi\n\u0001\u0000\u0000ij\u0005\u001a\u0000"+ - "\u0000jl\u0003\u0006\u0003\u0000kh\u0001\u0000\u0000\u0000lo\u0001\u0000"+ - "\u0000\u0000mk\u0001\u0000\u0000\u0000mn\u0001\u0000\u0000\u0000n\u0003"+ - "\u0001\u0000\u0000\u0000om\u0001\u0000\u0000\u0000pu\u0003X,\u0000qu\u0003"+ - "\u001c\u000e\u0000ru\u0003\u0016\u000b\u0000su\u0003\\.\u0000tp\u0001"+ - "\u0000\u0000\u0000tq\u0001\u0000\u0000\u0000tr\u0001\u0000\u0000\u0000"+ - "ts\u0001\u0000\u0000\u0000u\u0005\u0001\u0000\u0000\u0000v\u0084\u0003"+ - " 
\u0010\u0000w\u0084\u0003$\u0012\u0000x\u0084\u00034\u001a\u0000y\u0084"+ - "\u0003:\u001d\u0000z\u0084\u00036\u001b\u0000{\u0084\u0003\"\u0011\u0000"+ - "|\u0084\u0003\b\u0004\u0000}\u0084\u0003<\u001e\u0000~\u0084\u0003>\u001f"+ - "\u0000\u007f\u0084\u0003B!\u0000\u0080\u0084\u0003D\"\u0000\u0081\u0084"+ - "\u0003^/\u0000\u0082\u0084\u0003F#\u0000\u0083v\u0001\u0000\u0000\u0000"+ - "\u0083w\u0001\u0000\u0000\u0000\u0083x\u0001\u0000\u0000\u0000\u0083y"+ - "\u0001\u0000\u0000\u0000\u0083z\u0001\u0000\u0000\u0000\u0083{\u0001\u0000"+ - "\u0000\u0000\u0083|\u0001\u0000\u0000\u0000\u0083}\u0001\u0000\u0000\u0000"+ - "\u0083~\u0001\u0000\u0000\u0000\u0083\u007f\u0001\u0000\u0000\u0000\u0083"+ - "\u0080\u0001\u0000\u0000\u0000\u0083\u0081\u0001\u0000\u0000\u0000\u0083"+ - "\u0082\u0001\u0000\u0000\u0000\u0084\u0007\u0001\u0000\u0000\u0000\u0085"+ - "\u0086\u0005\u0012\u0000\u0000\u0086\u0087\u0003\n\u0005\u0000\u0087\t"+ - "\u0001\u0000\u0000\u0000\u0088\u0089\u0006\u0005\uffff\uffff\u0000\u0089"+ - "\u008a\u0005,\u0000\u0000\u008a\u00a5\u0003\n\u0005\u0007\u008b\u00a5"+ - "\u0003\u000e\u0007\u0000\u008c\u00a5\u0003\f\u0006\u0000\u008d\u008f\u0003"+ - "\u000e\u0007\u0000\u008e\u0090\u0005,\u0000\u0000\u008f\u008e\u0001\u0000"+ - "\u0000\u0000\u008f\u0090\u0001\u0000\u0000\u0000\u0090\u0091\u0001\u0000"+ - "\u0000\u0000\u0091\u0092\u0005)\u0000\u0000\u0092\u0093\u0005(\u0000\u0000"+ - "\u0093\u0098\u0003\u000e\u0007\u0000\u0094\u0095\u0005\"\u0000\u0000\u0095"+ - "\u0097\u0003\u000e\u0007\u0000\u0096\u0094\u0001\u0000\u0000\u0000\u0097"+ - "\u009a\u0001\u0000\u0000\u0000\u0098\u0096\u0001\u0000\u0000\u0000\u0098"+ - "\u0099\u0001\u0000\u0000\u0000\u0099\u009b\u0001\u0000\u0000\u0000\u009a"+ - "\u0098\u0001\u0000\u0000\u0000\u009b\u009c\u00052\u0000\u0000\u009c\u00a5"+ - "\u0001\u0000\u0000\u0000\u009d\u009e\u0003\u000e\u0007\u0000\u009e\u00a0"+ - "\u0005*\u0000\u0000\u009f\u00a1\u0005,\u0000\u0000\u00a0\u009f\u0001\u0000"+ - "\u0000\u0000\u00a0\u00a1\u0001\u0000\u0000\u0000\u00a1\u00a2\u0001\u0000"+ - "\u0000\u0000\u00a2\u00a3\u0005-\u0000\u0000\u00a3\u00a5\u0001\u0000\u0000"+ - "\u0000\u00a4\u0088\u0001\u0000\u0000\u0000\u00a4\u008b\u0001\u0000\u0000"+ - "\u0000\u00a4\u008c\u0001\u0000\u0000\u0000\u00a4\u008d\u0001\u0000\u0000"+ - "\u0000\u00a4\u009d\u0001\u0000\u0000\u0000\u00a5\u00ae\u0001\u0000\u0000"+ - "\u0000\u00a6\u00a7\n\u0004\u0000\u0000\u00a7\u00a8\u0005\u001f\u0000\u0000"+ - "\u00a8\u00ad\u0003\n\u0005\u0005\u00a9\u00aa\n\u0003\u0000\u0000\u00aa"+ - "\u00ab\u0005/\u0000\u0000\u00ab\u00ad\u0003\n\u0005\u0004\u00ac\u00a6"+ - "\u0001\u0000\u0000\u0000\u00ac\u00a9\u0001\u0000\u0000\u0000\u00ad\u00b0"+ - "\u0001\u0000\u0000\u0000\u00ae\u00ac\u0001\u0000\u0000\u0000\u00ae\u00af"+ - "\u0001\u0000\u0000\u0000\u00af\u000b\u0001\u0000\u0000\u0000\u00b0\u00ae"+ - "\u0001\u0000\u0000\u0000\u00b1\u00b3\u0003\u000e\u0007\u0000\u00b2\u00b4"+ - "\u0005,\u0000\u0000\u00b3\u00b2\u0001\u0000\u0000\u0000\u00b3\u00b4\u0001"+ - "\u0000\u0000\u0000\u00b4\u00b5\u0001\u0000\u0000\u0000\u00b5\u00b6\u0005"+ - "+\u0000\u0000\u00b6\u00b7\u0003T*\u0000\u00b7\u00c0\u0001\u0000\u0000"+ - "\u0000\u00b8\u00ba\u0003\u000e\u0007\u0000\u00b9\u00bb\u0005,\u0000\u0000"+ - "\u00ba\u00b9\u0001\u0000\u0000\u0000\u00ba\u00bb\u0001\u0000\u0000\u0000"+ - "\u00bb\u00bc\u0001\u0000\u0000\u0000\u00bc\u00bd\u00051\u0000\u0000\u00bd"+ - "\u00be\u0003T*\u0000\u00be\u00c0\u0001\u0000\u0000\u0000\u00bf\u00b1\u0001"+ - "\u0000\u0000\u0000\u00bf\u00b8\u0001\u0000\u0000\u0000\u00c0\r\u0001\u0000"+ - 
"\u0000\u0000\u00c1\u00c7\u0003\u0010\b\u0000\u00c2\u00c3\u0003\u0010\b"+ - "\u0000\u00c3\u00c4\u0003V+\u0000\u00c4\u00c5\u0003\u0010\b\u0000\u00c5"+ - "\u00c7\u0001\u0000\u0000\u0000\u00c6\u00c1\u0001\u0000\u0000\u0000\u00c6"+ - "\u00c2\u0001\u0000\u0000\u0000\u00c7\u000f\u0001\u0000\u0000\u0000\u00c8"+ - "\u00c9\u0006\b\uffff\uffff\u0000\u00c9\u00cd\u0003\u0012\t\u0000\u00ca"+ - "\u00cb\u0007\u0000\u0000\u0000\u00cb\u00cd\u0003\u0010\b\u0003\u00cc\u00c8"+ - "\u0001\u0000\u0000\u0000\u00cc\u00ca\u0001\u0000\u0000\u0000\u00cd\u00d6"+ - "\u0001\u0000\u0000\u0000\u00ce\u00cf\n\u0002\u0000\u0000\u00cf\u00d0\u0007"+ - "\u0001\u0000\u0000\u00d0\u00d5\u0003\u0010\b\u0003\u00d1\u00d2\n\u0001"+ - "\u0000\u0000\u00d2\u00d3\u0007\u0000\u0000\u0000\u00d3\u00d5\u0003\u0010"+ - "\b\u0002\u00d4\u00ce\u0001\u0000\u0000\u0000\u00d4\u00d1\u0001\u0000\u0000"+ - "\u0000\u00d5\u00d8\u0001\u0000\u0000\u0000\u00d6\u00d4\u0001\u0000\u0000"+ - "\u0000\u00d6\u00d7\u0001\u0000\u0000\u0000\u00d7\u0011\u0001\u0000\u0000"+ - "\u0000\u00d8\u00d6\u0001\u0000\u0000\u0000\u00d9\u00e1\u00032\u0019\u0000"+ - "\u00da\u00e1\u0003*\u0015\u0000\u00db\u00e1\u0003\u0014\n\u0000\u00dc"+ - "\u00dd\u0005(\u0000\u0000\u00dd\u00de\u0003\n\u0005\u0000\u00de\u00df"+ - "\u00052\u0000\u0000\u00df\u00e1\u0001\u0000\u0000\u0000\u00e0\u00d9\u0001"+ - "\u0000\u0000\u0000\u00e0\u00da\u0001\u0000\u0000\u0000\u00e0\u00db\u0001"+ - "\u0000\u0000\u0000\u00e0\u00dc\u0001\u0000\u0000\u0000\u00e1\u0013\u0001"+ - "\u0000\u0000\u0000\u00e2\u00e3\u0003.\u0017\u0000\u00e3\u00ed\u0005(\u0000"+ - "\u0000\u00e4\u00ee\u0005<\u0000\u0000\u00e5\u00ea\u0003\n\u0005\u0000"+ - "\u00e6\u00e7\u0005\"\u0000\u0000\u00e7\u00e9\u0003\n\u0005\u0000\u00e8"+ - "\u00e6\u0001\u0000\u0000\u0000\u00e9\u00ec\u0001\u0000\u0000\u0000\u00ea"+ - "\u00e8\u0001\u0000\u0000\u0000\u00ea\u00eb\u0001\u0000\u0000\u0000\u00eb"+ - "\u00ee\u0001\u0000\u0000\u0000\u00ec\u00ea\u0001\u0000\u0000\u0000\u00ed"+ - "\u00e4\u0001\u0000\u0000\u0000\u00ed\u00e5\u0001\u0000\u0000\u0000\u00ed"+ - "\u00ee\u0001\u0000\u0000\u0000\u00ee\u00ef\u0001\u0000\u0000\u0000\u00ef"+ - "\u00f0\u00052\u0000\u0000\u00f0\u0015\u0001\u0000\u0000\u0000\u00f1\u00f2"+ - "\u0005\u000e\u0000\u0000\u00f2\u00f3\u0003\u0018\f\u0000\u00f3\u0017\u0001"+ - "\u0000\u0000\u0000\u00f4\u00f9\u0003\u001a\r\u0000\u00f5\u00f6\u0005\""+ - "\u0000\u0000\u00f6\u00f8\u0003\u001a\r\u0000\u00f7\u00f5\u0001\u0000\u0000"+ - "\u0000\u00f8\u00fb\u0001\u0000\u0000\u0000\u00f9\u00f7\u0001\u0000\u0000"+ - "\u0000\u00f9\u00fa\u0001\u0000\u0000\u0000\u00fa\u0019\u0001\u0000\u0000"+ - "\u0000\u00fb\u00f9\u0001\u0000\u0000\u0000\u00fc\u0102\u0003\n\u0005\u0000"+ - "\u00fd\u00fe\u0003*\u0015\u0000\u00fe\u00ff\u0005!\u0000\u0000\u00ff\u0100"+ - "\u0003\n\u0005\u0000\u0100\u0102\u0001\u0000\u0000\u0000\u0101\u00fc\u0001"+ - "\u0000\u0000\u0000\u0101\u00fd\u0001\u0000\u0000\u0000\u0102\u001b\u0001"+ - "\u0000\u0000\u0000\u0103\u0104\u0005\u0006\u0000\u0000\u0104\u0109\u0003"+ - "(\u0014\u0000\u0105\u0106\u0005\"\u0000\u0000\u0106\u0108\u0003(\u0014"+ - "\u0000\u0107\u0105\u0001\u0000\u0000\u0000\u0108\u010b\u0001\u0000\u0000"+ - "\u0000\u0109\u0107\u0001\u0000\u0000\u0000\u0109\u010a\u0001\u0000\u0000"+ - "\u0000\u010a\u010d\u0001\u0000\u0000\u0000\u010b\u0109\u0001\u0000\u0000"+ - "\u0000\u010c\u010e\u0003\u001e\u000f\u0000\u010d\u010c\u0001\u0000\u0000"+ - "\u0000\u010d\u010e\u0001\u0000\u0000\u0000\u010e\u001d\u0001\u0000\u0000"+ - "\u0000\u010f\u0110\u0005?\u0000\u0000\u0110\u0111\u0005F\u0000\u0000\u0111"+ - 
"\u0116\u0003(\u0014\u0000\u0112\u0113\u0005\"\u0000\u0000\u0113\u0115"+ - "\u0003(\u0014\u0000\u0114\u0112\u0001\u0000\u0000\u0000\u0115\u0118\u0001"+ - "\u0000\u0000\u0000\u0116\u0114\u0001\u0000\u0000\u0000\u0116\u0117\u0001"+ - "\u0000\u0000\u0000\u0117\u0119\u0001\u0000\u0000\u0000\u0118\u0116\u0001"+ - "\u0000\u0000\u0000\u0119\u011a\u0005@\u0000\u0000\u011a\u001f\u0001\u0000"+ - "\u0000\u0000\u011b\u011c\u0005\u0004\u0000\u0000\u011c\u011d\u0003\u0018"+ - "\f\u0000\u011d!\u0001\u0000\u0000\u0000\u011e\u0120\u0005\u0011\u0000"+ - "\u0000\u011f\u0121\u0003\u0018\f\u0000\u0120\u011f\u0001\u0000\u0000\u0000"+ - "\u0120\u0121\u0001\u0000\u0000\u0000\u0121\u0124\u0001\u0000\u0000\u0000"+ - "\u0122\u0123\u0005\u001e\u0000\u0000\u0123\u0125\u0003&\u0013\u0000\u0124"+ - "\u0122\u0001\u0000\u0000\u0000\u0124\u0125\u0001\u0000\u0000\u0000\u0125"+ - "#\u0001\u0000\u0000\u0000\u0126\u0127\u0005\b\u0000\u0000\u0127\u012a"+ - "\u0003\u0018\f\u0000\u0128\u0129\u0005\u001e\u0000\u0000\u0129\u012b\u0003"+ - "&\u0013\u0000\u012a\u0128\u0001\u0000\u0000\u0000\u012a\u012b\u0001\u0000"+ - "\u0000\u0000\u012b%\u0001\u0000\u0000\u0000\u012c\u0131\u0003*\u0015\u0000"+ - "\u012d\u012e\u0005\"\u0000\u0000\u012e\u0130\u0003*\u0015\u0000\u012f"+ - "\u012d\u0001\u0000\u0000\u0000\u0130\u0133\u0001\u0000\u0000\u0000\u0131"+ - "\u012f\u0001\u0000\u0000\u0000\u0131\u0132\u0001\u0000\u0000\u0000\u0132"+ - "\'\u0001\u0000\u0000\u0000\u0133\u0131\u0001\u0000\u0000\u0000\u0134\u0135"+ - "\u0007\u0002\u0000\u0000\u0135)\u0001\u0000\u0000\u0000\u0136\u013b\u0003"+ - ".\u0017\u0000\u0137\u0138\u0005$\u0000\u0000\u0138\u013a\u0003.\u0017"+ - "\u0000\u0139\u0137\u0001\u0000\u0000\u0000\u013a\u013d\u0001\u0000\u0000"+ - "\u0000\u013b\u0139\u0001\u0000\u0000\u0000\u013b\u013c\u0001\u0000\u0000"+ - "\u0000\u013c+\u0001\u0000\u0000\u0000\u013d\u013b\u0001\u0000\u0000\u0000"+ - "\u013e\u0143\u00030\u0018\u0000\u013f\u0140\u0005$\u0000\u0000\u0140\u0142"+ - "\u00030\u0018\u0000\u0141\u013f\u0001\u0000\u0000\u0000\u0142\u0145\u0001"+ - "\u0000\u0000\u0000\u0143\u0141\u0001\u0000\u0000\u0000\u0143\u0144\u0001"+ - "\u0000\u0000\u0000\u0144-\u0001\u0000\u0000\u0000\u0145\u0143\u0001\u0000"+ - "\u0000\u0000\u0146\u0147\u0007\u0003\u0000\u0000\u0147/\u0001\u0000\u0000"+ - "\u0000\u0148\u0149\u0007\u0004\u0000\u0000\u01491\u0001\u0000\u0000\u0000"+ - "\u014a\u0175\u0005-\u0000\u0000\u014b\u014c\u0003R)\u0000\u014c\u014d"+ - "\u0005A\u0000\u0000\u014d\u0175\u0001\u0000\u0000\u0000\u014e\u0175\u0003"+ - "P(\u0000\u014f\u0175\u0003R)\u0000\u0150\u0175\u0003L&\u0000\u0151\u0175"+ - "\u00050\u0000\u0000\u0152\u0175\u0003T*\u0000\u0153\u0154\u0005?\u0000"+ - "\u0000\u0154\u0159\u0003N\'\u0000\u0155\u0156\u0005\"\u0000\u0000\u0156"+ - "\u0158\u0003N\'\u0000\u0157\u0155\u0001\u0000\u0000\u0000\u0158\u015b"+ - "\u0001\u0000\u0000\u0000\u0159\u0157\u0001\u0000\u0000\u0000\u0159\u015a"+ - "\u0001\u0000\u0000\u0000\u015a\u015c\u0001\u0000\u0000\u0000\u015b\u0159"+ - "\u0001\u0000\u0000\u0000\u015c\u015d\u0005@\u0000\u0000\u015d\u0175\u0001"+ - "\u0000\u0000\u0000\u015e\u015f\u0005?\u0000\u0000\u015f\u0164\u0003L&"+ - "\u0000\u0160\u0161\u0005\"\u0000\u0000\u0161\u0163\u0003L&\u0000\u0162"+ - "\u0160\u0001\u0000\u0000\u0000\u0163\u0166\u0001\u0000\u0000\u0000\u0164"+ - "\u0162\u0001\u0000\u0000\u0000\u0164\u0165\u0001\u0000\u0000\u0000\u0165"+ - "\u0167\u0001\u0000\u0000\u0000\u0166\u0164\u0001\u0000\u0000\u0000\u0167"+ - "\u0168\u0005@\u0000\u0000\u0168\u0175\u0001\u0000\u0000\u0000\u0169\u016a"+ - 
"\u0005?\u0000\u0000\u016a\u016f\u0003T*\u0000\u016b\u016c\u0005\"\u0000"+ - "\u0000\u016c\u016e\u0003T*\u0000\u016d\u016b\u0001\u0000\u0000\u0000\u016e"+ - "\u0171\u0001\u0000\u0000\u0000\u016f\u016d\u0001\u0000\u0000\u0000\u016f"+ - "\u0170\u0001\u0000\u0000\u0000\u0170\u0172\u0001\u0000\u0000\u0000\u0171"+ - "\u016f\u0001\u0000\u0000\u0000\u0172\u0173\u0005@\u0000\u0000\u0173\u0175"+ - "\u0001\u0000\u0000\u0000\u0174\u014a\u0001\u0000\u0000\u0000\u0174\u014b"+ - "\u0001\u0000\u0000\u0000\u0174\u014e\u0001\u0000\u0000\u0000\u0174\u014f"+ - "\u0001\u0000\u0000\u0000\u0174\u0150\u0001\u0000\u0000\u0000\u0174\u0151"+ - "\u0001\u0000\u0000\u0000\u0174\u0152\u0001\u0000\u0000\u0000\u0174\u0153"+ - "\u0001\u0000\u0000\u0000\u0174\u015e\u0001\u0000\u0000\u0000\u0174\u0169"+ - "\u0001\u0000\u0000\u0000\u01753\u0001\u0000\u0000\u0000\u0176\u0177\u0005"+ - "\n\u0000\u0000\u0177\u0178\u0005\u001c\u0000\u0000\u01785\u0001\u0000"+ - "\u0000\u0000\u0179\u017a\u0005\u0010\u0000\u0000\u017a\u017f\u00038\u001c"+ - "\u0000\u017b\u017c\u0005\"\u0000\u0000\u017c\u017e\u00038\u001c\u0000"+ - "\u017d\u017b\u0001\u0000\u0000\u0000\u017e\u0181\u0001\u0000\u0000\u0000"+ - "\u017f\u017d\u0001\u0000\u0000\u0000\u017f\u0180\u0001\u0000\u0000\u0000"+ - "\u01807\u0001\u0000\u0000\u0000\u0181\u017f\u0001\u0000\u0000\u0000\u0182"+ - "\u0184\u0003\n\u0005\u0000\u0183\u0185\u0007\u0005\u0000\u0000\u0184\u0183"+ - "\u0001\u0000\u0000\u0000\u0184\u0185\u0001\u0000\u0000\u0000\u0185\u0188"+ - "\u0001\u0000\u0000\u0000\u0186\u0187\u0005.\u0000\u0000\u0187\u0189\u0007"+ - "\u0006\u0000\u0000\u0188\u0186\u0001\u0000\u0000\u0000\u0188\u0189\u0001"+ - "\u0000\u0000\u0000\u01899\u0001\u0000\u0000\u0000\u018a\u018b\u0005\t"+ - "\u0000\u0000\u018b\u0190\u0003,\u0016\u0000\u018c\u018d\u0005\"\u0000"+ - "\u0000\u018d\u018f\u0003,\u0016\u0000\u018e\u018c\u0001\u0000\u0000\u0000"+ - "\u018f\u0192\u0001\u0000\u0000\u0000\u0190\u018e\u0001\u0000\u0000\u0000"+ - "\u0190\u0191\u0001\u0000\u0000\u0000\u0191\u019d\u0001\u0000\u0000\u0000"+ - "\u0192\u0190\u0001\u0000\u0000\u0000\u0193\u0194\u0005\f\u0000\u0000\u0194"+ - "\u0199\u0003,\u0016\u0000\u0195\u0196\u0005\"\u0000\u0000\u0196\u0198"+ - "\u0003,\u0016\u0000\u0197\u0195\u0001\u0000\u0000\u0000\u0198\u019b\u0001"+ - "\u0000\u0000\u0000\u0199\u0197\u0001\u0000\u0000\u0000\u0199\u019a\u0001"+ - "\u0000\u0000\u0000\u019a\u019d\u0001\u0000\u0000\u0000\u019b\u0199\u0001"+ - "\u0000\u0000\u0000\u019c\u018a\u0001\u0000\u0000\u0000\u019c\u0193\u0001"+ - "\u0000\u0000\u0000\u019d;\u0001\u0000\u0000\u0000\u019e\u019f\u0005\u0002"+ - "\u0000\u0000\u019f\u01a4\u0003,\u0016\u0000\u01a0\u01a1\u0005\"\u0000"+ - "\u0000\u01a1\u01a3\u0003,\u0016\u0000\u01a2\u01a0\u0001\u0000\u0000\u0000"+ - "\u01a3\u01a6\u0001\u0000\u0000\u0000\u01a4\u01a2\u0001\u0000\u0000\u0000"+ - "\u01a4\u01a5\u0001\u0000\u0000\u0000\u01a5=\u0001\u0000\u0000\u0000\u01a6"+ - "\u01a4\u0001\u0000\u0000\u0000\u01a7\u01a8\u0005\r\u0000\u0000\u01a8\u01ad"+ - "\u0003@ \u0000\u01a9\u01aa\u0005\"\u0000\u0000\u01aa\u01ac\u0003@ \u0000"+ - "\u01ab\u01a9\u0001\u0000\u0000\u0000\u01ac\u01af\u0001\u0000\u0000\u0000"+ - "\u01ad\u01ab\u0001\u0000\u0000\u0000\u01ad\u01ae\u0001\u0000\u0000\u0000"+ - "\u01ae?\u0001\u0000\u0000\u0000\u01af\u01ad\u0001\u0000\u0000\u0000\u01b0"+ - "\u01b1\u0003,\u0016\u0000\u01b1\u01b2\u0005O\u0000\u0000\u01b2\u01b3\u0003"+ - ",\u0016\u0000\u01b3A\u0001\u0000\u0000\u0000\u01b4\u01b5\u0005\u0001\u0000"+ - "\u0000\u01b5\u01b6\u0003\u0012\t\u0000\u01b6\u01b8\u0003T*\u0000\u01b7"+ - 
"\u01b9\u0003H$\u0000\u01b8\u01b7\u0001\u0000\u0000\u0000\u01b8\u01b9\u0001"+ - "\u0000\u0000\u0000\u01b9C\u0001\u0000\u0000\u0000\u01ba\u01bb\u0005\u0007"+ - "\u0000\u0000\u01bb\u01bc\u0003\u0012\t\u0000\u01bc\u01bd\u0003T*\u0000"+ - "\u01bdE\u0001\u0000\u0000\u0000\u01be\u01bf\u0005\u000b\u0000\u0000\u01bf"+ - "\u01c0\u0003*\u0015\u0000\u01c0G\u0001\u0000\u0000\u0000\u01c1\u01c6\u0003"+ - "J%\u0000\u01c2\u01c3\u0005\"\u0000\u0000\u01c3\u01c5\u0003J%\u0000\u01c4"+ - "\u01c2\u0001\u0000\u0000\u0000\u01c5\u01c8\u0001\u0000\u0000\u0000\u01c6"+ - "\u01c4\u0001\u0000\u0000\u0000\u01c6\u01c7\u0001\u0000\u0000\u0000\u01c7"+ - "I\u0001\u0000\u0000\u0000\u01c8\u01c6\u0001\u0000\u0000\u0000\u01c9\u01ca"+ - "\u0003.\u0017\u0000\u01ca\u01cb\u0005!\u0000\u0000\u01cb\u01cc\u00032"+ - "\u0019\u0000\u01ccK\u0001\u0000\u0000\u0000\u01cd\u01ce\u0007\u0007\u0000"+ - "\u0000\u01ceM\u0001\u0000\u0000\u0000\u01cf\u01d2\u0003P(\u0000\u01d0"+ - "\u01d2\u0003R)\u0000\u01d1\u01cf\u0001\u0000\u0000\u0000\u01d1\u01d0\u0001"+ - "\u0000\u0000\u0000\u01d2O\u0001\u0000\u0000\u0000\u01d3\u01d5\u0007\u0000"+ - "\u0000\u0000\u01d4\u01d3\u0001\u0000\u0000\u0000\u01d4\u01d5\u0001\u0000"+ - "\u0000\u0000\u01d5\u01d6\u0001\u0000\u0000\u0000\u01d6\u01d7\u0005\u001d"+ - "\u0000\u0000\u01d7Q\u0001\u0000\u0000\u0000\u01d8\u01da\u0007\u0000\u0000"+ - "\u0000\u01d9\u01d8\u0001\u0000\u0000\u0000\u01d9\u01da\u0001\u0000\u0000"+ - "\u0000\u01da\u01db\u0001\u0000\u0000\u0000\u01db\u01dc\u0005\u001c\u0000"+ - "\u0000\u01dcS\u0001\u0000\u0000\u0000\u01dd\u01de\u0005\u001b\u0000\u0000"+ - "\u01deU\u0001\u0000\u0000\u0000\u01df\u01e0\u0007\b\u0000\u0000\u01e0"+ - "W\u0001\u0000\u0000\u0000\u01e1\u01e2\u0005\u0005\u0000\u0000\u01e2\u01e3"+ - "\u0003Z-\u0000\u01e3Y\u0001\u0000\u0000\u0000\u01e4\u01e5\u0005?\u0000"+ - "\u0000\u01e5\u01e6\u0003\u0002\u0001\u0000\u01e6\u01e7\u0005@\u0000\u0000"+ - "\u01e7[\u0001\u0000\u0000\u0000\u01e8\u01e9\u0005\u000f\u0000\u0000\u01e9"+ - "\u01ed\u0005^\u0000\u0000\u01ea\u01eb\u0005\u000f\u0000\u0000\u01eb\u01ed"+ - "\u0005_\u0000\u0000\u01ec\u01e8\u0001\u0000\u0000\u0000\u01ec\u01ea\u0001"+ - "\u0000\u0000\u0000\u01ed]\u0001\u0000\u0000\u0000\u01ee\u01ef\u0005\u0003"+ - "\u0000\u0000\u01ef\u01f2\u0003(\u0014\u0000\u01f0\u01f1\u0005S\u0000\u0000"+ - "\u01f1\u01f3\u0003,\u0016\u0000\u01f2\u01f0\u0001\u0000\u0000\u0000\u01f2"+ - "\u01f3\u0001\u0000\u0000\u0000\u01f3\u01fd\u0001\u0000\u0000\u0000\u01f4"+ - "\u01f5\u0005T\u0000\u0000\u01f5\u01fa\u0003`0\u0000\u01f6\u01f7\u0005"+ - "\"\u0000\u0000\u01f7\u01f9\u0003`0\u0000\u01f8\u01f6\u0001\u0000\u0000"+ - "\u0000\u01f9\u01fc\u0001\u0000\u0000\u0000\u01fa\u01f8\u0001\u0000\u0000"+ - "\u0000\u01fa\u01fb\u0001\u0000\u0000\u0000\u01fb\u01fe\u0001\u0000\u0000"+ - "\u0000\u01fc\u01fa\u0001\u0000\u0000\u0000\u01fd\u01f4\u0001\u0000\u0000"+ - "\u0000\u01fd\u01fe\u0001\u0000\u0000\u0000\u01fe_\u0001\u0000\u0000\u0000"+ - "\u01ff\u0200\u0003,\u0016\u0000\u0200\u0201\u0005!\u0000\u0000\u0201\u0203"+ - "\u0001\u0000\u0000\u0000\u0202\u01ff\u0001\u0000\u0000\u0000\u0202\u0203"+ - "\u0001\u0000\u0000\u0000\u0203\u0204\u0001\u0000\u0000\u0000\u0204\u0205"+ - "\u0003,\u0016\u0000\u0205a\u0001\u0000\u0000\u00004mt\u0083\u008f\u0098"+ - "\u00a0\u00a4\u00ac\u00ae\u00b3\u00ba\u00bf\u00c6\u00cc\u00d4\u00d6\u00e0"+ - "\u00ea\u00ed\u00f9\u0101\u0109\u010d\u0116\u0120\u0124\u012a\u0131\u013b"+ - "\u0143\u0159\u0164\u016f\u0174\u017f\u0184\u0188\u0190\u0199\u019c\u01a4"+ - "\u01ad\u01b8\u01c6\u01d1\u01d4\u01d9\u01ec\u01f2\u01fa\u01fd\u0202"; + 
"\u0001\u0019\u0005\u0019\u015a\b\u0019\n\u0019\f\u0019\u015d\t\u0019\u0001"+ + "\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0005"+ + "\u0019\u0165\b\u0019\n\u0019\f\u0019\u0168\t\u0019\u0001\u0019\u0001\u0019"+ + "\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0005\u0019\u0170\b\u0019"+ + "\n\u0019\f\u0019\u0173\t\u0019\u0001\u0019\u0001\u0019\u0003\u0019\u0177"+ + "\b\u0019\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b\u0001"+ + "\u001b\u0001\u001b\u0005\u001b\u0180\b\u001b\n\u001b\f\u001b\u0183\t\u001b"+ + "\u0001\u001c\u0001\u001c\u0003\u001c\u0187\b\u001c\u0001\u001c\u0001\u001c"+ + "\u0003\u001c\u018b\b\u001c\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d"+ + "\u0005\u001d\u0191\b\u001d\n\u001d\f\u001d\u0194\t\u001d\u0001\u001d\u0001"+ + "\u001d\u0001\u001d\u0001\u001d\u0005\u001d\u019a\b\u001d\n\u001d\f\u001d"+ + "\u019d\t\u001d\u0003\u001d\u019f\b\u001d\u0001\u001e\u0001\u001e\u0001"+ + "\u001e\u0001\u001e\u0005\u001e\u01a5\b\u001e\n\u001e\f\u001e\u01a8\t\u001e"+ + "\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u01ae\b\u001f"+ + "\n\u001f\f\u001f\u01b1\t\u001f\u0001 \u0001 \u0001 \u0001 \u0001!\u0001"+ + "!\u0001!\u0001!\u0003!\u01bb\b!\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ + "#\u0001#\u0001#\u0001$\u0001$\u0001$\u0005$\u01c7\b$\n$\f$\u01ca\t$\u0001"+ + "%\u0001%\u0001%\u0001%\u0001&\u0001&\u0001\'\u0001\'\u0003\'\u01d4\b\'"+ + "\u0001(\u0003(\u01d7\b(\u0001(\u0001(\u0001)\u0003)\u01dc\b)\u0001)\u0001"+ + ")\u0001*\u0001*\u0001+\u0001+\u0001,\u0001,\u0001,\u0001-\u0001-\u0001"+ + "-\u0001-\u0001.\u0001.\u0001.\u0001.\u0003.\u01ef\b.\u0001/\u0001/\u0005"+ + "/\u01f3\b/\n/\f/\u01f6\t/\u0001/\u0001/\u0001/\u0003/\u01fb\b/\u0001/"+ + "\u0001/\u0001/\u0001/\u0005/\u0201\b/\n/\f/\u0204\t/\u0003/\u0206\b/\u0001"+ + "0\u00010\u00010\u00030\u020b\b0\u00010\u00010\u00011\u00011\u00011\u0001"+ + "1\u00011\u00011\u00011\u0000\u0003\u0002\n\u00102\u0000\u0002\u0004\u0006"+ + "\b\n\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,."+ + "02468:<>@BDFHJLNPRTVXZ\\^`b\u0000\t\u0001\u0000:;\u0001\u0000<>\u0002"+ + "\u0000BBGG\u0001\u0000AB\u0002\u0000BBKK\u0002\u0000 ##\u0001\u0000&"+ + "\'\u0002\u0000%%33\u0001\u000049\u0232\u0000d\u0001\u0000\u0000\u0000"+ + "\u0002g\u0001\u0000\u0000\u0000\u0004v\u0001\u0000\u0000\u0000\u0006\u0085"+ + "\u0001\u0000\u0000\u0000\b\u0087\u0001\u0000\u0000\u0000\n\u00a6\u0001"+ + "\u0000\u0000\u0000\f\u00c1\u0001\u0000\u0000\u0000\u000e\u00c8\u0001\u0000"+ + "\u0000\u0000\u0010\u00ce\u0001\u0000\u0000\u0000\u0012\u00e2\u0001\u0000"+ + "\u0000\u0000\u0014\u00e4\u0001\u0000\u0000\u0000\u0016\u00f3\u0001\u0000"+ + "\u0000\u0000\u0018\u00f6\u0001\u0000\u0000\u0000\u001a\u0103\u0001\u0000"+ + "\u0000\u0000\u001c\u0105\u0001\u0000\u0000\u0000\u001e\u0111\u0001\u0000"+ + "\u0000\u0000 \u011d\u0001\u0000\u0000\u0000\"\u0120\u0001\u0000\u0000"+ + "\u0000$\u0128\u0001\u0000\u0000\u0000&\u012e\u0001\u0000\u0000\u0000("+ + "\u0136\u0001\u0000\u0000\u0000*\u0138\u0001\u0000\u0000\u0000,\u0140\u0001"+ + "\u0000\u0000\u0000.\u0148\u0001\u0000\u0000\u00000\u014a\u0001\u0000\u0000"+ + "\u00002\u0176\u0001\u0000\u0000\u00004\u0178\u0001\u0000\u0000\u00006"+ + "\u017b\u0001\u0000\u0000\u00008\u0184\u0001\u0000\u0000\u0000:\u019e\u0001"+ + "\u0000\u0000\u0000<\u01a0\u0001\u0000\u0000\u0000>\u01a9\u0001\u0000\u0000"+ + "\u0000@\u01b2\u0001\u0000\u0000\u0000B\u01b6\u0001\u0000\u0000\u0000D"+ + "\u01bc\u0001\u0000\u0000\u0000F\u01c0\u0001\u0000\u0000\u0000H\u01c3\u0001"+ + 
"\u0000\u0000\u0000J\u01cb\u0001\u0000\u0000\u0000L\u01cf\u0001\u0000\u0000"+ + "\u0000N\u01d3\u0001\u0000\u0000\u0000P\u01d6\u0001\u0000\u0000\u0000R"+ + "\u01db\u0001\u0000\u0000\u0000T\u01df\u0001\u0000\u0000\u0000V\u01e1\u0001"+ + "\u0000\u0000\u0000X\u01e3\u0001\u0000\u0000\u0000Z\u01e6\u0001\u0000\u0000"+ + "\u0000\\\u01ee\u0001\u0000\u0000\u0000^\u01f0\u0001\u0000\u0000\u0000"+ + "`\u020a\u0001\u0000\u0000\u0000b\u020e\u0001\u0000\u0000\u0000de\u0003"+ + "\u0002\u0001\u0000ef\u0005\u0000\u0000\u0001f\u0001\u0001\u0000\u0000"+ + "\u0000gh\u0006\u0001\uffff\uffff\u0000hi\u0003\u0004\u0002\u0000io\u0001"+ + "\u0000\u0000\u0000jk\n\u0001\u0000\u0000kl\u0005\u001a\u0000\u0000ln\u0003"+ + "\u0006\u0003\u0000mj\u0001\u0000\u0000\u0000nq\u0001\u0000\u0000\u0000"+ + "om\u0001\u0000\u0000\u0000op\u0001\u0000\u0000\u0000p\u0003\u0001\u0000"+ + "\u0000\u0000qo\u0001\u0000\u0000\u0000rw\u0003X,\u0000sw\u0003\u001c\u000e"+ + "\u0000tw\u0003\u0016\u000b\u0000uw\u0003\\.\u0000vr\u0001\u0000\u0000"+ + "\u0000vs\u0001\u0000\u0000\u0000vt\u0001\u0000\u0000\u0000vu\u0001\u0000"+ + "\u0000\u0000w\u0005\u0001\u0000\u0000\u0000x\u0086\u0003 \u0010\u0000"+ + "y\u0086\u0003$\u0012\u0000z\u0086\u00034\u001a\u0000{\u0086\u0003:\u001d"+ + "\u0000|\u0086\u00036\u001b\u0000}\u0086\u0003\"\u0011\u0000~\u0086\u0003"+ + "\b\u0004\u0000\u007f\u0086\u0003<\u001e\u0000\u0080\u0086\u0003>\u001f"+ + "\u0000\u0081\u0086\u0003B!\u0000\u0082\u0086\u0003D\"\u0000\u0083\u0086"+ + "\u0003^/\u0000\u0084\u0086\u0003F#\u0000\u0085x\u0001\u0000\u0000\u0000"+ + "\u0085y\u0001\u0000\u0000\u0000\u0085z\u0001\u0000\u0000\u0000\u0085{"+ + "\u0001\u0000\u0000\u0000\u0085|\u0001\u0000\u0000\u0000\u0085}\u0001\u0000"+ + "\u0000\u0000\u0085~\u0001\u0000\u0000\u0000\u0085\u007f\u0001\u0000\u0000"+ + "\u0000\u0085\u0080\u0001\u0000\u0000\u0000\u0085\u0081\u0001\u0000\u0000"+ + "\u0000\u0085\u0082\u0001\u0000\u0000\u0000\u0085\u0083\u0001\u0000\u0000"+ + "\u0000\u0085\u0084\u0001\u0000\u0000\u0000\u0086\u0007\u0001\u0000\u0000"+ + "\u0000\u0087\u0088\u0005\u0012\u0000\u0000\u0088\u0089\u0003\n\u0005\u0000"+ + "\u0089\t\u0001\u0000\u0000\u0000\u008a\u008b\u0006\u0005\uffff\uffff\u0000"+ + "\u008b\u008c\u0005,\u0000\u0000\u008c\u00a7\u0003\n\u0005\u0007\u008d"+ + "\u00a7\u0003\u000e\u0007\u0000\u008e\u00a7\u0003\f\u0006\u0000\u008f\u0091"+ + "\u0003\u000e\u0007\u0000\u0090\u0092\u0005,\u0000\u0000\u0091\u0090\u0001"+ + "\u0000\u0000\u0000\u0091\u0092\u0001\u0000\u0000\u0000\u0092\u0093\u0001"+ + "\u0000\u0000\u0000\u0093\u0094\u0005)\u0000\u0000\u0094\u0095\u0005(\u0000"+ + "\u0000\u0095\u009a\u0003\u000e\u0007\u0000\u0096\u0097\u0005\"\u0000\u0000"+ + "\u0097\u0099\u0003\u000e\u0007\u0000\u0098\u0096\u0001\u0000\u0000\u0000"+ + "\u0099\u009c\u0001\u0000\u0000\u0000\u009a\u0098\u0001\u0000\u0000\u0000"+ + "\u009a\u009b\u0001\u0000\u0000\u0000\u009b\u009d\u0001\u0000\u0000\u0000"+ + "\u009c\u009a\u0001\u0000\u0000\u0000\u009d\u009e\u00052\u0000\u0000\u009e"+ + "\u00a7\u0001\u0000\u0000\u0000\u009f\u00a0\u0003\u000e\u0007\u0000\u00a0"+ + "\u00a2\u0005*\u0000\u0000\u00a1\u00a3\u0005,\u0000\u0000\u00a2\u00a1\u0001"+ + "\u0000\u0000\u0000\u00a2\u00a3\u0001\u0000\u0000\u0000\u00a3\u00a4\u0001"+ + "\u0000\u0000\u0000\u00a4\u00a5\u0005-\u0000\u0000\u00a5\u00a7\u0001\u0000"+ + "\u0000\u0000\u00a6\u008a\u0001\u0000\u0000\u0000\u00a6\u008d\u0001\u0000"+ + "\u0000\u0000\u00a6\u008e\u0001\u0000\u0000\u0000\u00a6\u008f\u0001\u0000"+ + "\u0000\u0000\u00a6\u009f\u0001\u0000\u0000\u0000\u00a7\u00b0\u0001\u0000"+ + 
"\u0000\u0000\u00a8\u00a9\n\u0004\u0000\u0000\u00a9\u00aa\u0005\u001f\u0000"+ + "\u0000\u00aa\u00af\u0003\n\u0005\u0005\u00ab\u00ac\n\u0003\u0000\u0000"+ + "\u00ac\u00ad\u0005/\u0000\u0000\u00ad\u00af\u0003\n\u0005\u0004\u00ae"+ + "\u00a8\u0001\u0000\u0000\u0000\u00ae\u00ab\u0001\u0000\u0000\u0000\u00af"+ + "\u00b2\u0001\u0000\u0000\u0000\u00b0\u00ae\u0001\u0000\u0000\u0000\u00b0"+ + "\u00b1\u0001\u0000\u0000\u0000\u00b1\u000b\u0001\u0000\u0000\u0000\u00b2"+ + "\u00b0\u0001\u0000\u0000\u0000\u00b3\u00b5\u0003\u000e\u0007\u0000\u00b4"+ + "\u00b6\u0005,\u0000\u0000\u00b5\u00b4\u0001\u0000\u0000\u0000\u00b5\u00b6"+ + "\u0001\u0000\u0000\u0000\u00b6\u00b7\u0001\u0000\u0000\u0000\u00b7\u00b8"+ + "\u0005+\u0000\u0000\u00b8\u00b9\u0003T*\u0000\u00b9\u00c2\u0001\u0000"+ + "\u0000\u0000\u00ba\u00bc\u0003\u000e\u0007\u0000\u00bb\u00bd\u0005,\u0000"+ + "\u0000\u00bc\u00bb\u0001\u0000\u0000\u0000\u00bc\u00bd\u0001\u0000\u0000"+ + "\u0000\u00bd\u00be\u0001\u0000\u0000\u0000\u00be\u00bf\u00051\u0000\u0000"+ + "\u00bf\u00c0\u0003T*\u0000\u00c0\u00c2\u0001\u0000\u0000\u0000\u00c1\u00b3"+ + "\u0001\u0000\u0000\u0000\u00c1\u00ba\u0001\u0000\u0000\u0000\u00c2\r\u0001"+ + "\u0000\u0000\u0000\u00c3\u00c9\u0003\u0010\b\u0000\u00c4\u00c5\u0003\u0010"+ + "\b\u0000\u00c5\u00c6\u0003V+\u0000\u00c6\u00c7\u0003\u0010\b\u0000\u00c7"+ + "\u00c9\u0001\u0000\u0000\u0000\u00c8\u00c3\u0001\u0000\u0000\u0000\u00c8"+ + "\u00c4\u0001\u0000\u0000\u0000\u00c9\u000f\u0001\u0000\u0000\u0000\u00ca"+ + "\u00cb\u0006\b\uffff\uffff\u0000\u00cb\u00cf\u0003\u0012\t\u0000\u00cc"+ + "\u00cd\u0007\u0000\u0000\u0000\u00cd\u00cf\u0003\u0010\b\u0003\u00ce\u00ca"+ + "\u0001\u0000\u0000\u0000\u00ce\u00cc\u0001\u0000\u0000\u0000\u00cf\u00d8"+ + "\u0001\u0000\u0000\u0000\u00d0\u00d1\n\u0002\u0000\u0000\u00d1\u00d2\u0007"+ + "\u0001\u0000\u0000\u00d2\u00d7\u0003\u0010\b\u0003\u00d3\u00d4\n\u0001"+ + "\u0000\u0000\u00d4\u00d5\u0007\u0000\u0000\u0000\u00d5\u00d7\u0003\u0010"+ + "\b\u0002\u00d6\u00d0\u0001\u0000\u0000\u0000\u00d6\u00d3\u0001\u0000\u0000"+ + "\u0000\u00d7\u00da\u0001\u0000\u0000\u0000\u00d8\u00d6\u0001\u0000\u0000"+ + "\u0000\u00d8\u00d9\u0001\u0000\u0000\u0000\u00d9\u0011\u0001\u0000\u0000"+ + "\u0000\u00da\u00d8\u0001\u0000\u0000\u0000\u00db\u00e3\u00032\u0019\u0000"+ + "\u00dc\u00e3\u0003*\u0015\u0000\u00dd\u00e3\u0003\u0014\n\u0000\u00de"+ + "\u00df\u0005(\u0000\u0000\u00df\u00e0\u0003\n\u0005\u0000\u00e0\u00e1"+ + "\u00052\u0000\u0000\u00e1\u00e3\u0001\u0000\u0000\u0000\u00e2\u00db\u0001"+ + "\u0000\u0000\u0000\u00e2\u00dc\u0001\u0000\u0000\u0000\u00e2\u00dd\u0001"+ + "\u0000\u0000\u0000\u00e2\u00de\u0001\u0000\u0000\u0000\u00e3\u0013\u0001"+ + "\u0000\u0000\u0000\u00e4\u00e5\u0003.\u0017\u0000\u00e5\u00ef\u0005(\u0000"+ + "\u0000\u00e6\u00f0\u0005<\u0000\u0000\u00e7\u00ec\u0003\n\u0005\u0000"+ + "\u00e8\u00e9\u0005\"\u0000\u0000\u00e9\u00eb\u0003\n\u0005\u0000\u00ea"+ + "\u00e8\u0001\u0000\u0000\u0000\u00eb\u00ee\u0001\u0000\u0000\u0000\u00ec"+ + "\u00ea\u0001\u0000\u0000\u0000\u00ec\u00ed\u0001\u0000\u0000\u0000\u00ed"+ + "\u00f0\u0001\u0000\u0000\u0000\u00ee\u00ec\u0001\u0000\u0000\u0000\u00ef"+ + "\u00e6\u0001\u0000\u0000\u0000\u00ef\u00e7\u0001\u0000\u0000\u0000\u00ef"+ + "\u00f0\u0001\u0000\u0000\u0000\u00f0\u00f1\u0001\u0000\u0000\u0000\u00f1"+ + "\u00f2\u00052\u0000\u0000\u00f2\u0015\u0001\u0000\u0000\u0000\u00f3\u00f4"+ + "\u0005\u000e\u0000\u0000\u00f4\u00f5\u0003\u0018\f\u0000\u00f5\u0017\u0001"+ + "\u0000\u0000\u0000\u00f6\u00fb\u0003\u001a\r\u0000\u00f7\u00f8\u0005\""+ + 
"\u0000\u0000\u00f8\u00fa\u0003\u001a\r\u0000\u00f9\u00f7\u0001\u0000\u0000"+ + "\u0000\u00fa\u00fd\u0001\u0000\u0000\u0000\u00fb\u00f9\u0001\u0000\u0000"+ + "\u0000\u00fb\u00fc\u0001\u0000\u0000\u0000\u00fc\u0019\u0001\u0000\u0000"+ + "\u0000\u00fd\u00fb\u0001\u0000\u0000\u0000\u00fe\u0104\u0003\n\u0005\u0000"+ + "\u00ff\u0100\u0003*\u0015\u0000\u0100\u0101\u0005!\u0000\u0000\u0101\u0102"+ + "\u0003\n\u0005\u0000\u0102\u0104\u0001\u0000\u0000\u0000\u0103\u00fe\u0001"+ + "\u0000\u0000\u0000\u0103\u00ff\u0001\u0000\u0000\u0000\u0104\u001b\u0001"+ + "\u0000\u0000\u0000\u0105\u0106\u0005\u0006\u0000\u0000\u0106\u010b\u0003"+ + "(\u0014\u0000\u0107\u0108\u0005\"\u0000\u0000\u0108\u010a\u0003(\u0014"+ + "\u0000\u0109\u0107\u0001\u0000\u0000\u0000\u010a\u010d\u0001\u0000\u0000"+ + "\u0000\u010b\u0109\u0001\u0000\u0000\u0000\u010b\u010c\u0001\u0000\u0000"+ + "\u0000\u010c\u010f\u0001\u0000\u0000\u0000\u010d\u010b\u0001\u0000\u0000"+ + "\u0000\u010e\u0110\u0003\u001e\u000f\u0000\u010f\u010e\u0001\u0000\u0000"+ + "\u0000\u010f\u0110\u0001\u0000\u0000\u0000\u0110\u001d\u0001\u0000\u0000"+ + "\u0000\u0111\u0112\u0005?\u0000\u0000\u0112\u0113\u0005F\u0000\u0000\u0113"+ + "\u0118\u0003(\u0014\u0000\u0114\u0115\u0005\"\u0000\u0000\u0115\u0117"+ + "\u0003(\u0014\u0000\u0116\u0114\u0001\u0000\u0000\u0000\u0117\u011a\u0001"+ + "\u0000\u0000\u0000\u0118\u0116\u0001\u0000\u0000\u0000\u0118\u0119\u0001"+ + "\u0000\u0000\u0000\u0119\u011b\u0001\u0000\u0000\u0000\u011a\u0118\u0001"+ + "\u0000\u0000\u0000\u011b\u011c\u0005@\u0000\u0000\u011c\u001f\u0001\u0000"+ + "\u0000\u0000\u011d\u011e\u0005\u0004\u0000\u0000\u011e\u011f\u0003\u0018"+ + "\f\u0000\u011f!\u0001\u0000\u0000\u0000\u0120\u0122\u0005\u0011\u0000"+ + "\u0000\u0121\u0123\u0003\u0018\f\u0000\u0122\u0121\u0001\u0000\u0000\u0000"+ + "\u0122\u0123\u0001\u0000\u0000\u0000\u0123\u0126\u0001\u0000\u0000\u0000"+ + "\u0124\u0125\u0005\u001e\u0000\u0000\u0125\u0127\u0003&\u0013\u0000\u0126"+ + "\u0124\u0001\u0000\u0000\u0000\u0126\u0127\u0001\u0000\u0000\u0000\u0127"+ + "#\u0001\u0000\u0000\u0000\u0128\u0129\u0005\b\u0000\u0000\u0129\u012c"+ + "\u0003\u0018\f\u0000\u012a\u012b\u0005\u001e\u0000\u0000\u012b\u012d\u0003"+ + "&\u0013\u0000\u012c\u012a\u0001\u0000\u0000\u0000\u012c\u012d\u0001\u0000"+ + "\u0000\u0000\u012d%\u0001\u0000\u0000\u0000\u012e\u0133\u0003*\u0015\u0000"+ + "\u012f\u0130\u0005\"\u0000\u0000\u0130\u0132\u0003*\u0015\u0000\u0131"+ + "\u012f\u0001\u0000\u0000\u0000\u0132\u0135\u0001\u0000\u0000\u0000\u0133"+ + "\u0131\u0001\u0000\u0000\u0000\u0133\u0134\u0001\u0000\u0000\u0000\u0134"+ + "\'\u0001\u0000\u0000\u0000\u0135\u0133\u0001\u0000\u0000\u0000\u0136\u0137"+ + "\u0007\u0002\u0000\u0000\u0137)\u0001\u0000\u0000\u0000\u0138\u013d\u0003"+ + ".\u0017\u0000\u0139\u013a\u0005$\u0000\u0000\u013a\u013c\u0003.\u0017"+ + "\u0000\u013b\u0139\u0001\u0000\u0000\u0000\u013c\u013f\u0001\u0000\u0000"+ + "\u0000\u013d\u013b\u0001\u0000\u0000\u0000\u013d\u013e\u0001\u0000\u0000"+ + "\u0000\u013e+\u0001\u0000\u0000\u0000\u013f\u013d\u0001\u0000\u0000\u0000"+ + "\u0140\u0145\u00030\u0018\u0000\u0141\u0142\u0005$\u0000\u0000\u0142\u0144"+ + "\u00030\u0018\u0000\u0143\u0141\u0001\u0000\u0000\u0000\u0144\u0147\u0001"+ + "\u0000\u0000\u0000\u0145\u0143\u0001\u0000\u0000\u0000\u0145\u0146\u0001"+ + "\u0000\u0000\u0000\u0146-\u0001\u0000\u0000\u0000\u0147\u0145\u0001\u0000"+ + "\u0000\u0000\u0148\u0149\u0007\u0003\u0000\u0000\u0149/\u0001\u0000\u0000"+ + "\u0000\u014a\u014b\u0007\u0004\u0000\u0000\u014b1\u0001\u0000\u0000\u0000"+ + 
"\u014c\u0177\u0005-\u0000\u0000\u014d\u014e\u0003R)\u0000\u014e\u014f"+ + "\u0005A\u0000\u0000\u014f\u0177\u0001\u0000\u0000\u0000\u0150\u0177\u0003"+ + "P(\u0000\u0151\u0177\u0003R)\u0000\u0152\u0177\u0003L&\u0000\u0153\u0177"+ + "\u00050\u0000\u0000\u0154\u0177\u0003T*\u0000\u0155\u0156\u0005?\u0000"+ + "\u0000\u0156\u015b\u0003N\'\u0000\u0157\u0158\u0005\"\u0000\u0000\u0158"+ + "\u015a\u0003N\'\u0000\u0159\u0157\u0001\u0000\u0000\u0000\u015a\u015d"+ + "\u0001\u0000\u0000\u0000\u015b\u0159\u0001\u0000\u0000\u0000\u015b\u015c"+ + "\u0001\u0000\u0000\u0000\u015c\u015e\u0001\u0000\u0000\u0000\u015d\u015b"+ + "\u0001\u0000\u0000\u0000\u015e\u015f\u0005@\u0000\u0000\u015f\u0177\u0001"+ + "\u0000\u0000\u0000\u0160\u0161\u0005?\u0000\u0000\u0161\u0166\u0003L&"+ + "\u0000\u0162\u0163\u0005\"\u0000\u0000\u0163\u0165\u0003L&\u0000\u0164"+ + "\u0162\u0001\u0000\u0000\u0000\u0165\u0168\u0001\u0000\u0000\u0000\u0166"+ + "\u0164\u0001\u0000\u0000\u0000\u0166\u0167\u0001\u0000\u0000\u0000\u0167"+ + "\u0169\u0001\u0000\u0000\u0000\u0168\u0166\u0001\u0000\u0000\u0000\u0169"+ + "\u016a\u0005@\u0000\u0000\u016a\u0177\u0001\u0000\u0000\u0000\u016b\u016c"+ + "\u0005?\u0000\u0000\u016c\u0171\u0003T*\u0000\u016d\u016e\u0005\"\u0000"+ + "\u0000\u016e\u0170\u0003T*\u0000\u016f\u016d\u0001\u0000\u0000\u0000\u0170"+ + "\u0173\u0001\u0000\u0000\u0000\u0171\u016f\u0001\u0000\u0000\u0000\u0171"+ + "\u0172\u0001\u0000\u0000\u0000\u0172\u0174\u0001\u0000\u0000\u0000\u0173"+ + "\u0171\u0001\u0000\u0000\u0000\u0174\u0175\u0005@\u0000\u0000\u0175\u0177"+ + "\u0001\u0000\u0000\u0000\u0176\u014c\u0001\u0000\u0000\u0000\u0176\u014d"+ + "\u0001\u0000\u0000\u0000\u0176\u0150\u0001\u0000\u0000\u0000\u0176\u0151"+ + "\u0001\u0000\u0000\u0000\u0176\u0152\u0001\u0000\u0000\u0000\u0176\u0153"+ + "\u0001\u0000\u0000\u0000\u0176\u0154\u0001\u0000\u0000\u0000\u0176\u0155"+ + "\u0001\u0000\u0000\u0000\u0176\u0160\u0001\u0000\u0000\u0000\u0176\u016b"+ + "\u0001\u0000\u0000\u0000\u01773\u0001\u0000\u0000\u0000\u0178\u0179\u0005"+ + "\n\u0000\u0000\u0179\u017a\u0005\u001c\u0000\u0000\u017a5\u0001\u0000"+ + "\u0000\u0000\u017b\u017c\u0005\u0010\u0000\u0000\u017c\u0181\u00038\u001c"+ + "\u0000\u017d\u017e\u0005\"\u0000\u0000\u017e\u0180\u00038\u001c\u0000"+ + "\u017f\u017d\u0001\u0000\u0000\u0000\u0180\u0183\u0001\u0000\u0000\u0000"+ + "\u0181\u017f\u0001\u0000\u0000\u0000\u0181\u0182\u0001\u0000\u0000\u0000"+ + "\u01827\u0001\u0000\u0000\u0000\u0183\u0181\u0001\u0000\u0000\u0000\u0184"+ + "\u0186\u0003\n\u0005\u0000\u0185\u0187\u0007\u0005\u0000\u0000\u0186\u0185"+ + "\u0001\u0000\u0000\u0000\u0186\u0187\u0001\u0000\u0000\u0000\u0187\u018a"+ + "\u0001\u0000\u0000\u0000\u0188\u0189\u0005.\u0000\u0000\u0189\u018b\u0007"+ + "\u0006\u0000\u0000\u018a\u0188\u0001\u0000\u0000\u0000\u018a\u018b\u0001"+ + "\u0000\u0000\u0000\u018b9\u0001\u0000\u0000\u0000\u018c\u018d\u0005\t"+ + "\u0000\u0000\u018d\u0192\u0003,\u0016\u0000\u018e\u018f\u0005\"\u0000"+ + "\u0000\u018f\u0191\u0003,\u0016\u0000\u0190\u018e\u0001\u0000\u0000\u0000"+ + "\u0191\u0194\u0001\u0000\u0000\u0000\u0192\u0190\u0001\u0000\u0000\u0000"+ + "\u0192\u0193\u0001\u0000\u0000\u0000\u0193\u019f\u0001\u0000\u0000\u0000"+ + "\u0194\u0192\u0001\u0000\u0000\u0000\u0195\u0196\u0005\f\u0000\u0000\u0196"+ + "\u019b\u0003,\u0016\u0000\u0197\u0198\u0005\"\u0000\u0000\u0198\u019a"+ + "\u0003,\u0016\u0000\u0199\u0197\u0001\u0000\u0000\u0000\u019a\u019d\u0001"+ + "\u0000\u0000\u0000\u019b\u0199\u0001\u0000\u0000\u0000\u019b\u019c\u0001"+ + 
"\u0000\u0000\u0000\u019c\u019f\u0001\u0000\u0000\u0000\u019d\u019b\u0001"+ + "\u0000\u0000\u0000\u019e\u018c\u0001\u0000\u0000\u0000\u019e\u0195\u0001"+ + "\u0000\u0000\u0000\u019f;\u0001\u0000\u0000\u0000\u01a0\u01a1\u0005\u0002"+ + "\u0000\u0000\u01a1\u01a6\u0003,\u0016\u0000\u01a2\u01a3\u0005\"\u0000"+ + "\u0000\u01a3\u01a5\u0003,\u0016\u0000\u01a4\u01a2\u0001\u0000\u0000\u0000"+ + "\u01a5\u01a8\u0001\u0000\u0000\u0000\u01a6\u01a4\u0001\u0000\u0000\u0000"+ + "\u01a6\u01a7\u0001\u0000\u0000\u0000\u01a7=\u0001\u0000\u0000\u0000\u01a8"+ + "\u01a6\u0001\u0000\u0000\u0000\u01a9\u01aa\u0005\r\u0000\u0000\u01aa\u01af"+ + "\u0003@ \u0000\u01ab\u01ac\u0005\"\u0000\u0000\u01ac\u01ae\u0003@ \u0000"+ + "\u01ad\u01ab\u0001\u0000\u0000\u0000\u01ae\u01b1\u0001\u0000\u0000\u0000"+ + "\u01af\u01ad\u0001\u0000\u0000\u0000\u01af\u01b0\u0001\u0000\u0000\u0000"+ + "\u01b0?\u0001\u0000\u0000\u0000\u01b1\u01af\u0001\u0000\u0000\u0000\u01b2"+ + "\u01b3\u0003,\u0016\u0000\u01b3\u01b4\u0005O\u0000\u0000\u01b4\u01b5\u0003"+ + ",\u0016\u0000\u01b5A\u0001\u0000\u0000\u0000\u01b6\u01b7\u0005\u0001\u0000"+ + "\u0000\u01b7\u01b8\u0003\u0012\t\u0000\u01b8\u01ba\u0003T*\u0000\u01b9"+ + "\u01bb\u0003H$\u0000\u01ba\u01b9\u0001\u0000\u0000\u0000\u01ba\u01bb\u0001"+ + "\u0000\u0000\u0000\u01bbC\u0001\u0000\u0000\u0000\u01bc\u01bd\u0005\u0007"+ + "\u0000\u0000\u01bd\u01be\u0003\u0012\t\u0000\u01be\u01bf\u0003T*\u0000"+ + "\u01bfE\u0001\u0000\u0000\u0000\u01c0\u01c1\u0005\u000b\u0000\u0000\u01c1"+ + "\u01c2\u0003*\u0015\u0000\u01c2G\u0001\u0000\u0000\u0000\u01c3\u01c8\u0003"+ + "J%\u0000\u01c4\u01c5\u0005\"\u0000\u0000\u01c5\u01c7\u0003J%\u0000\u01c6"+ + "\u01c4\u0001\u0000\u0000\u0000\u01c7\u01ca\u0001\u0000\u0000\u0000\u01c8"+ + "\u01c6\u0001\u0000\u0000\u0000\u01c8\u01c9\u0001\u0000\u0000\u0000\u01c9"+ + "I\u0001\u0000\u0000\u0000\u01ca\u01c8\u0001\u0000\u0000\u0000\u01cb\u01cc"+ + "\u0003.\u0017\u0000\u01cc\u01cd\u0005!\u0000\u0000\u01cd\u01ce\u00032"+ + "\u0019\u0000\u01ceK\u0001\u0000\u0000\u0000\u01cf\u01d0\u0007\u0007\u0000"+ + "\u0000\u01d0M\u0001\u0000\u0000\u0000\u01d1\u01d4\u0003P(\u0000\u01d2"+ + "\u01d4\u0003R)\u0000\u01d3\u01d1\u0001\u0000\u0000\u0000\u01d3\u01d2\u0001"+ + "\u0000\u0000\u0000\u01d4O\u0001\u0000\u0000\u0000\u01d5\u01d7\u0007\u0000"+ + "\u0000\u0000\u01d6\u01d5\u0001\u0000\u0000\u0000\u01d6\u01d7\u0001\u0000"+ + "\u0000\u0000\u01d7\u01d8\u0001\u0000\u0000\u0000\u01d8\u01d9\u0005\u001d"+ + "\u0000\u0000\u01d9Q\u0001\u0000\u0000\u0000\u01da\u01dc\u0007\u0000\u0000"+ + "\u0000\u01db\u01da\u0001\u0000\u0000\u0000\u01db\u01dc\u0001\u0000\u0000"+ + "\u0000\u01dc\u01dd\u0001\u0000\u0000\u0000\u01dd\u01de\u0005\u001c\u0000"+ + "\u0000\u01deS\u0001\u0000\u0000\u0000\u01df\u01e0\u0005\u001b\u0000\u0000"+ + "\u01e0U\u0001\u0000\u0000\u0000\u01e1\u01e2\u0007\b\u0000\u0000\u01e2"+ + "W\u0001\u0000\u0000\u0000\u01e3\u01e4\u0005\u0005\u0000\u0000\u01e4\u01e5"+ + "\u0003Z-\u0000\u01e5Y\u0001\u0000\u0000\u0000\u01e6\u01e7\u0005?\u0000"+ + "\u0000\u01e7\u01e8\u0003\u0002\u0001\u0000\u01e8\u01e9\u0005@\u0000\u0000"+ + "\u01e9[\u0001\u0000\u0000\u0000\u01ea\u01eb\u0005\u000f\u0000\u0000\u01eb"+ + "\u01ef\u0005_\u0000\u0000\u01ec\u01ed\u0005\u000f\u0000\u0000\u01ed\u01ef"+ + "\u0005`\u0000\u0000\u01ee\u01ea\u0001\u0000\u0000\u0000\u01ee\u01ec\u0001"+ + "\u0000\u0000\u0000\u01ef]\u0001\u0000\u0000\u0000\u01f0\u01f4\u0005\u0003"+ + "\u0000\u0000\u01f1\u01f3\u0003b1\u0000\u01f2\u01f1\u0001\u0000\u0000\u0000"+ + "\u01f3\u01f6\u0001\u0000\u0000\u0000\u01f4\u01f2\u0001\u0000\u0000\u0000"+ + 
"\u01f4\u01f5\u0001\u0000\u0000\u0000\u01f5\u01f7\u0001\u0000\u0000\u0000"+ + "\u01f6\u01f4\u0001\u0000\u0000\u0000\u01f7\u01fa\u0005U\u0000\u0000\u01f8"+ + "\u01f9\u0005S\u0000\u0000\u01f9\u01fb\u0003,\u0016\u0000\u01fa\u01f8\u0001"+ + "\u0000\u0000\u0000\u01fa\u01fb\u0001\u0000\u0000\u0000\u01fb\u0205\u0001"+ + "\u0000\u0000\u0000\u01fc\u01fd\u0005T\u0000\u0000\u01fd\u0202\u0003`0"+ + "\u0000\u01fe\u01ff\u0005\"\u0000\u0000\u01ff\u0201\u0003`0\u0000\u0200"+ + "\u01fe\u0001\u0000\u0000\u0000\u0201\u0204\u0001\u0000\u0000\u0000\u0202"+ + "\u0200\u0001\u0000\u0000\u0000\u0202\u0203\u0001\u0000\u0000\u0000\u0203"+ + "\u0206\u0001\u0000\u0000\u0000\u0204\u0202\u0001\u0000\u0000\u0000\u0205"+ + "\u01fc\u0001\u0000\u0000\u0000\u0205\u0206\u0001\u0000\u0000\u0000\u0206"+ + "_\u0001\u0000\u0000\u0000\u0207\u0208\u0003,\u0016\u0000\u0208\u0209\u0005"+ + "!\u0000\u0000\u0209\u020b\u0001\u0000\u0000\u0000\u020a\u0207\u0001\u0000"+ + "\u0000\u0000\u020a\u020b\u0001\u0000\u0000\u0000\u020b\u020c\u0001\u0000"+ + "\u0000\u0000\u020c\u020d\u0003,\u0016\u0000\u020da\u0001\u0000\u0000\u0000"+ + "\u020e\u020f\u0005?\u0000\u0000\u020f\u0210\u0005e\u0000\u0000\u0210\u0211"+ + "\u0005d\u0000\u0000\u0211\u0212\u0005e\u0000\u0000\u0212\u0213\u0005@"+ + "\u0000\u0000\u0213c\u0001\u0000\u0000\u00005ov\u0085\u0091\u009a\u00a2"+ + "\u00a6\u00ae\u00b0\u00b5\u00bc\u00c1\u00c8\u00ce\u00d6\u00d8\u00e2\u00ec"+ + "\u00ef\u00fb\u0103\u010b\u010f\u0118\u0122\u0126\u012c\u0133\u013d\u0145"+ + "\u015b\u0166\u0171\u0176\u0181\u0186\u018a\u0192\u019b\u019e\u01a6\u01af"+ + "\u01ba\u01c8\u01d3\u01d6\u01db\u01ee\u01f4\u01fa\u0202\u0205\u020a"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java index 73b529cd2be92..c8b86b75d6c16 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java @@ -864,6 +864,18 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener { *

<p>The default implementation does nothing.</p>
*/ @Override public void exitEnrichWithClause(EsqlBaseParser.EnrichWithClauseContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
+ */ + @Override public void enterSetting(EsqlBaseParser.SettingContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
+ */ + @Override public void exitSetting(EsqlBaseParser.SettingContext ctx) { } /** * {@inheritDoc} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java index d35481745cecc..1b825dd9c212f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java @@ -509,4 +509,11 @@ public class EsqlBaseParserBaseVisitor extends AbstractParseTreeVisitor im * {@link #visitChildren} on {@code ctx}.

*/ @Override public T visitEnrichWithClause(EsqlBaseParser.EnrichWithClauseContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override public T visitSetting(EsqlBaseParser.SettingContext ctx) { return visitChildren(ctx); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java index 6c8cd7272d8dc..89c2e39b65f8d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java @@ -775,4 +775,14 @@ public interface EsqlBaseParserListener extends ParseTreeListener { * @param ctx the parse tree */ void exitEnrichWithClause(EsqlBaseParser.EnrichWithClauseContext ctx); + /** + * Enter a parse tree produced by {@link EsqlBaseParser#setting}. + * @param ctx the parse tree + */ + void enterSetting(EsqlBaseParser.SettingContext ctx); + /** + * Exit a parse tree produced by {@link EsqlBaseParser#setting}. + * @param ctx the parse tree + */ + void exitSetting(EsqlBaseParser.SettingContext ctx); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java index 2fe5de566dbaf..0fc4fecc4a2df 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java @@ -465,4 +465,10 @@ public interface EsqlBaseParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitEnrichWithClause(EsqlBaseParser.EnrichWithClauseContext ctx); + /** + * Visit a parse tree produced by {@link EsqlBaseParser#setting}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitSetting(EsqlBaseParser.SettingContext ctx); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java index 2039dc633f6cf..7541326c172ef 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java @@ -25,7 +25,7 @@ public String visitIdentifier(IdentifierContext ctx) { @Override public String visitIdentifierPattern(EsqlBaseParser.IdentifierPatternContext ctx) { - return unquoteIdentifier(ctx.QUOTED_IDENTIFIER(), ctx.PROJECT_UNQUOTED_IDENTIFIER()); + return unquoteIdentifier(ctx.QUOTED_IDENTIFIER(), ctx.UNQUOTED_ID_PATTERN()); } @Override @@ -33,7 +33,7 @@ public String visitFromIdentifier(FromIdentifierContext ctx) { return ctx == null ? 
null : unquoteIdentifier(ctx.QUOTED_IDENTIFIER(), ctx.FROM_UNQUOTED_IDENTIFIER()); } - static String unquoteIdentifier(TerminalNode quotedNode, TerminalNode unquotedNode) { + protected static String unquoteIdentifier(TerminalNode quotedNode, TerminalNode unquotedNode) { String result; if (quotedNode != null) { String identifier = quotedNode.getText(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index f9d1a252afe42..5e90f6e8e44c9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -54,11 +54,13 @@ import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.function.Function; import static org.elasticsearch.common.logging.HeaderWarning.addWarning; +import static org.elasticsearch.xpack.esql.plan.logical.Enrich.Mode; import static org.elasticsearch.xpack.ql.parser.ParserUtils.source; import static org.elasticsearch.xpack.ql.parser.ParserUtils.typedParsing; import static org.elasticsearch.xpack.ql.parser.ParserUtils.visitList; @@ -311,21 +313,20 @@ public LogicalPlan visitShowFunctions(EsqlBaseParser.ShowFunctionsContext ctx) { @Override public PlanFactory visitEnrichCommand(EsqlBaseParser.EnrichCommandContext ctx) { return p -> { - String policyName = visitFromIdentifier(ctx.policyName); + String policyName = ctx.policyName.getText(); var source = source(ctx); + Mode mode = enrichMode(ctx.setting()); + NamedExpression matchField = ctx.ON() != null ? 
visitQualifiedNamePattern(ctx.matchField) : new EmptyAttribute(source); if (matchField.name().contains("*")) { - throw new ParsingException( - source(ctx), - "Using wildcards (*) in ENRICH WITH projections is not allowed [{}]", - matchField.name() - ); + throw new ParsingException(source, "Using wildcards (*) in ENRICH WITH projections is not allowed [{}]", matchField.name()); } List keepClauses = visitList(this, ctx.enrichWithClause(), NamedExpression.class); return new Enrich( source, p, + mode, new Literal(source(ctx.policyName), policyName, DataTypes.KEYWORD), matchField, null, @@ -334,5 +335,35 @@ public PlanFactory visitEnrichCommand(EsqlBaseParser.EnrichCommandContext ctx) { }; } + private Mode enrichMode(List setting) { + if (setting == null || setting.isEmpty()) { + return null; + } + var s = setting.get(0); + var source = source(s); + if (setting.size() > 1) { + throw new ParsingException(source, "Only one setting allowed for now in ENRICH"); + } + String mode = "ccq.mode"; + + var nameText = s.name.getText(); + if (mode.equals(nameText.toLowerCase(Locale.ROOT)) == false) { + throw new ParsingException(source(s.name), "Unsupported setting [{}], expected [{}]", nameText, mode); + } + + var valueText = s.value.getText(); + Enrich.Mode m = Enrich.Mode.from(valueText); + if (m == null) { + throw new ParsingException( + source(s.value), + "Unrecognized value [{}], ENRICH [{}] needs to be one of {}", + valueText, + nameText, + Enrich.Mode.values() + ); + } + return m; + } + interface PlanFactory extends Function {} } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java index 1ad73be7902f7..37a0ff0fe5001 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.plan.logical; +import org.elasticsearch.common.util.Maps; import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolution; import org.elasticsearch.xpack.ql.capabilities.Resolvables; import org.elasticsearch.xpack.ql.expression.Attribute; @@ -19,6 +20,8 @@ import org.elasticsearch.xpack.ql.tree.Source; import java.util.List; +import java.util.Locale; +import java.util.Map; import java.util.Objects; import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputAttributes; @@ -30,15 +33,39 @@ public class Enrich extends UnaryPlan { private List enrichFields; private List output; + private final Mode mode; + + public enum Mode { + ANY, + COORDINATOR, + REMOTE; + + private static final Map map; + + static { + var values = Mode.values(); + map = Maps.newMapWithExpectedSize(values.length); + for (Mode m : values) { + map.put(m.name(), m); + } + } + + public static Mode from(String name) { + return name == null ? null : map.get(name.toUpperCase(Locale.ROOT)); + } + } + public Enrich( Source source, LogicalPlan child, + Mode mode, Expression policyName, NamedExpression matchField, EnrichPolicyResolution policy, List enrichFields ) { super(source, child); + this.mode = mode == null ? 
Mode.ANY : mode; this.policyName = policyName; this.matchField = matchField; this.policy = policy; @@ -61,6 +88,10 @@ public Expression policyName() { return policyName; } + public Mode mode() { + return mode; + } + @Override public boolean expressionsResolved() { return policyName.resolved() @@ -71,12 +102,12 @@ public boolean expressionsResolved() { @Override public UnaryPlan replaceChild(LogicalPlan newChild) { - return new Enrich(source(), newChild, policyName, matchField, policy, enrichFields); + return new Enrich(source(), newChild, mode, policyName, matchField, policy, enrichFields); } @Override protected NodeInfo info() { - return NodeInfo.create(this, Enrich::new, child(), policyName, matchField, policy, enrichFields); + return NodeInfo.create(this, Enrich::new, child(), mode, policyName, matchField, policy, enrichFields); } @Override @@ -96,7 +127,8 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; if (super.equals(o) == false) return false; Enrich enrich = (Enrich) o; - return Objects.equals(policyName, enrich.policyName) + return Objects.equals(mode, enrich.mode) + && Objects.equals(policyName, enrich.policyName) && Objects.equals(matchField, enrich.matchField) && Objects.equals(policy, enrich.policy) && Objects.equals(enrichFields, enrich.enrichFields); @@ -104,6 +136,6 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(super.hashCode(), policyName, matchField, policy, enrichFields); + return Objects.hash(super.hashCode(), mode, policyName, matchField, policy, enrichFields); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index b20d166beb22e..931c96a8cb8ed 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -676,7 +676,15 @@ public void testLikeRLike() { public void testEnrich() { assertEquals( - new Enrich(EMPTY, PROCESSING_CMD_INPUT, new Literal(EMPTY, "countries", KEYWORD), new EmptyAttribute(EMPTY), null, List.of()), + new Enrich( + EMPTY, + PROCESSING_CMD_INPUT, + null, + new Literal(EMPTY, "countries", KEYWORD), + new EmptyAttribute(EMPTY), + null, + List.of() + ), processingCommand("enrich countries") ); @@ -684,12 +692,27 @@ public void testEnrich() { new Enrich( EMPTY, PROCESSING_CMD_INPUT, + null, + new Literal(EMPTY, "index-policy", KEYWORD), + new UnresolvedAttribute(EMPTY, "field_underscore"), + null, + List.of() + ), + processingCommand("enrich index-policy ON field_underscore") + ); + + Enrich.Mode mode = randomFrom(Enrich.Mode.values()); + assertEquals( + new Enrich( + EMPTY, + PROCESSING_CMD_INPUT, + mode, new Literal(EMPTY, "countries", KEYWORD), new UnresolvedAttribute(EMPTY, "country_code"), null, List.of() ), - processingCommand("enrich countries ON country_code") + processingCommand("enrich [ccq.mode :" + mode.name() + "] countries ON country_code") ); expectError("from a | enrich countries on foo* ", "Using wildcards (*) in ENRICH WITH projections is not allowed [foo*]"); @@ -702,6 +725,10 @@ public void testEnrich() { "from a | enrich countries on foo with x* = bar ", "Using wildcards (*) in ENRICH WITH projections is not allowed [x*]" ); + expectError( + "from a | enrich [ccq.mode : typo] countries on foo", + "line 1:30: Unrecognized value [typo], 
ENRICH [ccq.mode] needs to be one of [ANY, COORDINATOR, REMOTE]" + ); } public void testMvExpand() { From f4aaa20f28661143535461b586ef18681561e4d1 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 11 Jan 2024 10:53:50 +0200 Subject: [PATCH 30/75] Add support for the `type` parameter to the Query API Key API (#103695) This adds support for the type parameter to the Query API key API. The type for an API Key can currently be either rest or cross_cluster. Relates: #101691 --- .../rest-api/security/query-api-key.asciidoc | 5 + .../xpack/security/QueryApiKeyIT.java | 143 +++++++++++++++++- .../xpack/security/apikey/ApiKeyRestIT.java | 71 +++++++++ .../apikey/TransportQueryApiKeyAction.java | 30 +++- .../support/ApiKeyBoolQueryBuilder.java | 43 ++++-- .../support/ApiKeyFieldNameTranslators.java | 3 + .../support/ApiKeyBoolQueryBuilderTests.java | 142 +++++++++++++---- .../ApiKeyBackwardsCompatibilityIT.java | 51 ++++++- 8 files changed, 436 insertions(+), 52 deletions(-) diff --git a/docs/reference/rest-api/security/query-api-key.asciidoc b/docs/reference/rest-api/security/query-api-key.asciidoc index 0e5973a010a47..67b0b7bfac58d 100644 --- a/docs/reference/rest-api/security/query-api-key.asciidoc +++ b/docs/reference/rest-api/security/query-api-key.asciidoc @@ -64,6 +64,11 @@ You can query the following public values associated with an API key. `id`:: ID of the API key. Note `id` must be queried with the <> query. +`type`:: +API keys can be of type `rest`, if created via the <> or +the <> APIs, or of type `cross_cluster` if created via +the <> API. + `name`:: Name of the API key. diff --git a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryApiKeyIT.java b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryApiKeyIT.java index f79077ae3a550..18d9dcdc822e5 100644 --- a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryApiKeyIT.java +++ b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryApiKeyIT.java @@ -9,8 +9,10 @@ import org.apache.http.HttpHeaders; import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; +import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.core.Strings; import org.elasticsearch.core.Tuple; import org.elasticsearch.test.XContentTestUtils; @@ -21,6 +23,7 @@ import java.time.Instant; import java.util.ArrayList; import java.util.Base64; +import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; @@ -43,6 +46,8 @@ public class QueryApiKeyIT extends SecurityInBasicRestTestCase { private static final String API_KEY_ADMIN_AUTH_HEADER = "Basic YXBpX2tleV9hZG1pbjpzZWN1cml0eS10ZXN0LXBhc3N3b3Jk"; private static final String API_KEY_USER_AUTH_HEADER = "Basic YXBpX2tleV91c2VyOnNlY3VyaXR5LXRlc3QtcGFzc3dvcmQ="; private static final String TEST_USER_AUTH_HEADER = "Basic c2VjdXJpdHlfdGVzdF91c2VyOnNlY3VyaXR5LXRlc3QtcGFzc3dvcmQ="; + private static final String SYSTEM_WRITE_ROLE_NAME = "system_write"; + private static final String SUPERUSER_WITH_SYSTEM_WRITE = "superuser_with_system_write"; public void testQuery() throws IOException { createApiKeys(); @@ -297,6 +302,71 @@ public void testPagination() throws IOException, InterruptedException { assertThat(responseMap2.get("count"), equalTo(0)); } + 
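// Illustrative sketch only (endpoint and JSON shape assumed from the query-api-key.asciidoc change above
// and from the queries used in testTypeField below, not asserted by this patch): the new "type" field is
// queried like any other supported API key field, e.g.
//   POST /_security/_query/api_key
//   { "query": { "term": { "type": "rest" } } }
// The testTypeField method below exercises this with term, prefix, wildcard and range queries, and checks
// that keys whose stored type is rewritten to "other" or "cross_cluster" no longer match the "rest" queries.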
public void testTypeField() throws Exception { + final List<String> allApiKeyIds = new ArrayList<>(7); + for (int i = 0; i < 7; i++) { + allApiKeyIds.add( + createApiKey("typed_key_" + i, Map.of(), randomFrom(API_KEY_ADMIN_AUTH_HEADER, API_KEY_USER_AUTH_HEADER)).v1() + ); + } + List<String> apiKeyIdsSubset = randomSubsetOf(allApiKeyIds); + List<String> apiKeyIdsSubsetDifference = new ArrayList<>(allApiKeyIds); + apiKeyIdsSubsetDifference.removeAll(apiKeyIdsSubset); + + List<String> apiKeyRestTypeQueries = List.of(""" + {"query": {"term": {"type": "rest" }}}""", """ + {"query": {"bool": {"must_not": [{"term": {"type": "cross_cluster"}}, {"term": {"type": "other"}}]}}}""", """ + {"query": {"prefix": {"type": "re" }}}""", """ + {"query": {"wildcard": {"type": "r*t" }}}""", """ + {"query": {"range": {"type": {"gte": "raaa", "lte": "rzzz"}}}}"""); + + for (String query : apiKeyRestTypeQueries) { + assertQuery(API_KEY_ADMIN_AUTH_HEADER, query, apiKeys -> { + assertThat( + apiKeys.stream().map(k -> (String) k.get("id")).toList(), + containsInAnyOrder(allApiKeyIds.toArray(new String[0])) + ); + }); + } + + createSystemWriteRole(SYSTEM_WRITE_ROLE_NAME); + String systemWriteCreds = createUser(SUPERUSER_WITH_SYSTEM_WRITE, new String[] { "superuser", SYSTEM_WRITE_ROLE_NAME }); + + // test keys with no "type" field are still considered of type "rest" + // this is in order to accommodate pre-8.9 API keys, which were all of type "rest" implicitly + updateApiKeys(systemWriteCreds, "ctx._source.remove('type');", apiKeyIdsSubset); + for (String query : apiKeyRestTypeQueries) { + assertQuery(API_KEY_ADMIN_AUTH_HEADER, query, apiKeys -> { + assertThat( + apiKeys.stream().map(k -> (String) k.get("id")).toList(), + containsInAnyOrder(allApiKeyIds.toArray(new String[0])) + ); + }); + } + + // but the same keys with type "other" are NOT of type "rest" + updateApiKeys(systemWriteCreds, "ctx._source['type']='other';", apiKeyIdsSubset); + for (String query : apiKeyRestTypeQueries) { + assertQuery(API_KEY_ADMIN_AUTH_HEADER, query, apiKeys -> { + assertThat( + apiKeys.stream().map(k -> (String) k.get("id")).toList(), + containsInAnyOrder(apiKeyIdsSubsetDifference.toArray(new String[0])) + ); + }); + } + // the complement set is not of type "rest" if it is "cross_cluster" + updateApiKeys(systemWriteCreds, "ctx._source['type']='rest';", apiKeyIdsSubset); + updateApiKeys(systemWriteCreds, "ctx._source['type']='cross_cluster';", apiKeyIdsSubsetDifference); + for (String query : apiKeyRestTypeQueries) { + assertQuery(API_KEY_ADMIN_AUTH_HEADER, query, apiKeys -> { + assertThat( + apiKeys.stream().map(k -> (String) k.get("id")).toList(), + containsInAnyOrder(apiKeyIdsSubset.toArray(new String[0])) + ); + }); + } + } + @SuppressWarnings("unchecked") public void testSort() throws IOException { final String authHeader = randomFrom(API_KEY_ADMIN_AUTH_HEADER, API_KEY_USER_AUTH_HEADER); @@ -598,10 +668,73 @@ private String createAndInvalidateApiKey(String name, String authHeader) throws return tuple.v1(); } - private void createUser(String name) throws IOException { - final Request request = new Request("POST", "/_security/user/" + name); - request.setJsonEntity(""" - {"password":"super-strong-password","roles":[]}"""); - assertOK(adminClient().performRequest(request)); + private String createUser(String username) throws IOException { + return createUser(username, new String[0]); + } + + private String createUser(String username, String[] roles) throws IOException { + final Request request = new Request("POST", "/_security/user/" + username); + Map<String, Object> body = 
Map.ofEntries(Map.entry("roles", roles), Map.entry("password", "super-strong-password".toString())); + request.setJsonEntity(XContentTestUtils.convertToXContent(body, XContentType.JSON).utf8ToString()); + Response response = adminClient().performRequest(request); + assertOK(response); + return basicAuthHeaderValue(username, new SecureString("super-strong-password".toCharArray())); + } + + private void createSystemWriteRole(String roleName) throws IOException { + final Request addRole = new Request("POST", "/_security/role/" + roleName); + addRole.setJsonEntity(""" + { + "indices": [ + { + "names": [ "*" ], + "privileges": ["all"], + "allow_restricted_indices" : true + } + ] + }"""); + Response response = adminClient().performRequest(addRole); + assertOK(response); + } + + private void expectWarnings(Request request, String... expectedWarnings) { + final Set expected = Set.of(expectedWarnings); + RequestOptions options = request.getOptions().toBuilder().setWarningsHandler(warnings -> { + final Set actual = Set.copyOf(warnings); + // Return true if the warnings aren't what we expected; the client will treat them as a fatal error. + return actual.equals(expected) == false; + }).build(); + request.setOptions(options); + } + + private void updateApiKeys(String creds, String script, Collection ids) throws IOException { + if (ids.isEmpty()) { + return; + } + final Request request = new Request("POST", "/.security/_update_by_query?refresh=true&wait_for_completion=true"); + request.setJsonEntity(Strings.format(""" + { + "script": { + "source": "%s", + "lang": "painless" + }, + "query": { + "bool": { + "must": [ + {"term": {"doc_type": "api_key"}}, + {"ids": {"values": %s}} + ] + } + } + } + """, script, ids.stream().map(id -> "\"" + id + "\"").collect(Collectors.toList()))); + request.setOptions(request.getOptions().toBuilder().addHeader(HttpHeaders.AUTHORIZATION, creds)); + expectWarnings( + request, + "this request accesses system indices: [.security-7]," + + " but in a future major version, direct access to system indices will be prevented by default" + ); + Response response = client().performRequest(request); + assertOK(response); } } diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java index 6c4aaeada74c7..0d5a757f65084 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java @@ -35,8 +35,10 @@ import java.io.IOException; import java.time.Instant; import java.time.temporal.ChronoUnit; +import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; @@ -54,9 +56,11 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.iterableWithSize; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.not; import static 
org.hamcrest.Matchers.notNullValue; @@ -703,6 +707,73 @@ public void testRemoteIndicesSupportForApiKeys() throws IOException { } + @SuppressWarnings("unchecked") + public void testQueryCrossClusterApiKeysByType() throws IOException { + final List apiKeyIds = new ArrayList<>(3); + for (int i = 0; i < randomIntBetween(3, 5); i++) { + Request createRequest = new Request("POST", "/_security/cross_cluster/api_key"); + createRequest.setJsonEntity(Strings.format(""" + { + "name": "test-cross-key-query-%d", + "access": { + "search": [ + { + "names": [ "whatever" ] + } + ] + }, + "metadata": { "tag": %d, "label": "rest" } + }""", i, i)); + setUserForRequest(createRequest, MANAGE_SECURITY_USER, END_USER_PASSWORD); + ObjectPath createResponse = assertOKAndCreateObjectPath(client().performRequest(createRequest)); + apiKeyIds.add(createResponse.evaluate("id")); + } + // the "cross_cluster" keys are not "rest" type + for (String restTypeQuery : List.of(""" + {"query": {"term": {"type": "rest" }}}""", """ + {"query": {"bool": {"must_not": {"term": {"type": "cross_cluster"}}}}}""", """ + {"query": {"prefix": {"type": "re" }}}""", """ + {"query": {"wildcard": {"type": "r*t" }}}""", """ + {"query": {"range": {"type": {"gte": "raaa", "lte": "rzzz"}}}}""")) { + Request queryRequest = new Request("GET", "/_security/_query/api_key"); + queryRequest.addParameter("with_limited_by", String.valueOf(randomBoolean())); + queryRequest.setJsonEntity(restTypeQuery); + setUserForRequest(queryRequest, MANAGE_API_KEY_USER, END_USER_PASSWORD); + ObjectPath queryResponse = assertOKAndCreateObjectPath(client().performRequest(queryRequest)); + assertThat(queryResponse.evaluate("total"), is(0)); + assertThat(queryResponse.evaluate("count"), is(0)); + assertThat(queryResponse.evaluate("api_keys"), iterableWithSize(0)); + } + for (String crossClusterTypeQuery : List.of(""" + {"query": {"term": {"type": "cross_cluster" }}}""", """ + {"query": {"bool": {"must_not": {"term": {"type": "rest"}}}}}""", """ + {"query": {"prefix": {"type": "cro" }}}""", """ + {"query": {"wildcard": {"type": "*oss_*er" }}}""", """ + {"query": {"range": {"type": {"gte": "cross", "lte": "zzzz"}}}}""")) { + Request queryRequest = new Request("GET", "/_security/_query/api_key"); + queryRequest.addParameter("with_limited_by", String.valueOf(randomBoolean())); + queryRequest.setJsonEntity(crossClusterTypeQuery); + setUserForRequest(queryRequest, MANAGE_API_KEY_USER, END_USER_PASSWORD); + ObjectPath queryResponse = assertOKAndCreateObjectPath(client().performRequest(queryRequest)); + assertThat(queryResponse.evaluate("total"), is(apiKeyIds.size())); + assertThat(queryResponse.evaluate("count"), is(apiKeyIds.size())); + assertThat(queryResponse.evaluate("api_keys"), iterableWithSize(apiKeyIds.size())); + Iterator apiKeys = ((List) queryResponse.evaluate("api_keys")).iterator(); + while (apiKeys.hasNext()) { + assertThat(apiKeyIds, hasItem((String) ((Map) apiKeys.next()).get("id"))); + } + } + final Request queryRequest = new Request("GET", "/_security/_query/api_key"); + queryRequest.addParameter("with_limited_by", String.valueOf(randomBoolean())); + queryRequest.setJsonEntity(""" + {"query": {"bool": {"must": [{"term": {"type": "cross_cluster" }}, {"term": {"metadata.tag": 2}}]}}}"""); + setUserForRequest(queryRequest, MANAGE_API_KEY_USER, END_USER_PASSWORD); + final ObjectPath queryResponse = assertOKAndCreateObjectPath(client().performRequest(queryRequest)); + assertThat(queryResponse.evaluate("total"), is(1)); + assertThat(queryResponse.evaluate("count"), 
is(1)); + assertThat(queryResponse.evaluate("api_keys.0.name"), is("test-cross-key-query-2")); + } + public void testCreateCrossClusterApiKey() throws IOException { final Request createRequest = new Request("POST", "/_security/cross_cluster/api_key"); createRequest.setJsonEntity(""" diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportQueryApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportQueryApiKeyAction.java index 4077597a7ef16..b9961e6735c7e 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportQueryApiKeyAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportQueryApiKeyAction.java @@ -27,11 +27,26 @@ import org.elasticsearch.xpack.security.support.ApiKeyFieldNameTranslators; import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MAIN_ALIAS; public final class TransportQueryApiKeyAction extends HandledTransportAction { + // API keys with no "type" field are implicitly of type "rest" (this is the case for all API Keys created before v8.9). + // The below runtime field ensures that the "type" field can be used by the {@link RestQueryApiKeyAction}, + // while making the implicit "rest" type feature transparent to the caller (hence all keys are either "rest" + // or "cross_cluster", and the "type" is always set). + // This can be improved, to get rid of the runtime performance impact of the runtime field, by reindexing + // the api key docs and setting the "type" to "rest" if empty. But the infrastructure to run such a maintenance + // task on a system index (once the cluster version permits) is not currently available. + public static final String API_KEY_TYPE_RUNTIME_MAPPING_FIELD = "runtime_key_type"; + private static final Map API_KEY_TYPE_RUNTIME_MAPPING = Map.of( + API_KEY_TYPE_RUNTIME_MAPPING_FIELD, + Map.of("type", "keyword", "script", Map.of("source", "emit(field('type').get(\"rest\"));")) + ); + private final ApiKeyService apiKeyService; private final SecurityContext securityContext; @@ -66,12 +81,19 @@ protected void doExecute(Task task, QueryApiKeyRequest request, ActionListener { + if (API_KEY_TYPE_RUNTIME_MAPPING_FIELD.equals(fieldName)) { + accessesApiKeyTypeField.set(true); + } + }, request.isFilterForCurrentUser() ? 
authentication : null); searchSourceBuilder.query(apiKeyBoolQueryBuilder); + // only add the query-level runtime field to the search request if it's actually referring the "type" field + if (accessesApiKeyTypeField.get()) { + searchSourceBuilder.runtimeMappings(API_KEY_TYPE_RUNTIME_MAPPING); + } + if (request.getFieldSortBuilders() != null) { translateFieldSortBuilders(request.getFieldSortBuilders(), searchSourceBuilder); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilder.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilder.java index 28ecd5ffe5b57..5cb6573c8b5dc 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilder.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilder.java @@ -28,6 +28,9 @@ import java.io.IOException; import java.util.Set; +import java.util.function.Consumer; + +import static org.elasticsearch.xpack.security.action.apikey.TransportQueryApiKeyAction.API_KEY_TYPE_RUNTIME_MAPPING_FIELD; public class ApiKeyBoolQueryBuilder extends BoolQueryBuilder { @@ -36,10 +39,14 @@ public class ApiKeyBoolQueryBuilder extends BoolQueryBuilder { "_id", "doc_type", "name", + "type", + API_KEY_TYPE_RUNTIME_MAPPING_FIELD, "api_key_invalidated", "invalidation_time", "creation_time", - "expiration_time" + "expiration_time", + "creator.principal", + "creator.realm" ); private ApiKeyBoolQueryBuilder() {} @@ -56,17 +63,23 @@ private ApiKeyBoolQueryBuilder() {} * * @param queryBuilder This represents the query parsed directly from the user input. It is validated * and transformed (see above). + * @param fieldNameVisitor This {@code Consumer} is invoked with all the (index-level) field names referred to in the passed-in query. * @param authentication The user's authentication object. If present, it will be used to filter the results * to only include API keys owned by the user. * @return A specialised query builder for API keys that is safe to run on the security index. 
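 * A minimal usage sketch (illustrative only; the local variable names are assumptions and not part of this class):
 * {@code List<String> queryFields = new ArrayList<>();}
 * {@code ApiKeyBoolQueryBuilder apiKeyQuery = ApiKeyBoolQueryBuilder.build(userQuery, queryFields::add, authentication);}
 * After the call, {@code queryFields} holds every index-level field name the translated query touches, for example {@code doc_type}.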
*/ - public static ApiKeyBoolQueryBuilder build(QueryBuilder queryBuilder, @Nullable Authentication authentication) { + public static ApiKeyBoolQueryBuilder build( + QueryBuilder queryBuilder, + Consumer fieldNameVisitor, + @Nullable Authentication authentication + ) { final ApiKeyBoolQueryBuilder finalQuery = new ApiKeyBoolQueryBuilder(); if (queryBuilder != null) { - QueryBuilder processedQuery = doProcess(queryBuilder); + QueryBuilder processedQuery = doProcess(queryBuilder, fieldNameVisitor); finalQuery.must(processedQuery); } finalQuery.filter(QueryBuilders.termQuery("doc_type", "api_key")); + fieldNameVisitor.accept("doc_type"); if (authentication != null) { if (authentication.isApiKey()) { @@ -77,8 +90,10 @@ public static ApiKeyBoolQueryBuilder build(QueryBuilder queryBuilder, @Nullable finalQuery.filter(QueryBuilders.idsQuery().addIds(apiKeyId)); } else { finalQuery.filter(QueryBuilders.termQuery("creator.principal", authentication.getEffectiveSubject().getUser().principal())); + fieldNameVisitor.accept("creator.principal"); final String[] realms = ApiKeyService.getOwnersRealmNames(authentication); final QueryBuilder realmsQuery = ApiKeyService.filterForRealmNames(realms); + fieldNameVisitor.accept("creator.realm"); assert realmsQuery != null; finalQuery.filter(realmsQuery); } @@ -86,15 +101,15 @@ public static ApiKeyBoolQueryBuilder build(QueryBuilder queryBuilder, @Nullable return finalQuery; } - private static QueryBuilder doProcess(QueryBuilder qb) { + private static QueryBuilder doProcess(QueryBuilder qb, Consumer fieldNameVisitor) { if (qb instanceof final BoolQueryBuilder query) { final BoolQueryBuilder newQuery = QueryBuilders.boolQuery() .minimumShouldMatch(query.minimumShouldMatch()) .adjustPureNegative(query.adjustPureNegative()); - query.must().stream().map(ApiKeyBoolQueryBuilder::doProcess).forEach(newQuery::must); - query.should().stream().map(ApiKeyBoolQueryBuilder::doProcess).forEach(newQuery::should); - query.mustNot().stream().map(ApiKeyBoolQueryBuilder::doProcess).forEach(newQuery::mustNot); - query.filter().stream().map(ApiKeyBoolQueryBuilder::doProcess).forEach(newQuery::filter); + query.must().stream().map(q -> ApiKeyBoolQueryBuilder.doProcess(q, fieldNameVisitor)).forEach(newQuery::must); + query.should().stream().map(q -> ApiKeyBoolQueryBuilder.doProcess(q, fieldNameVisitor)).forEach(newQuery::should); + query.mustNot().stream().map(q -> ApiKeyBoolQueryBuilder.doProcess(q, fieldNameVisitor)).forEach(newQuery::mustNot); + query.filter().stream().map(q -> ApiKeyBoolQueryBuilder.doProcess(q, fieldNameVisitor)).forEach(newQuery::filter); return newQuery; } else if (qb instanceof MatchAllQueryBuilder) { return qb; @@ -102,29 +117,35 @@ private static QueryBuilder doProcess(QueryBuilder qb) { return qb; } else if (qb instanceof final TermQueryBuilder query) { final String translatedFieldName = ApiKeyFieldNameTranslators.translate(query.fieldName()); + fieldNameVisitor.accept(translatedFieldName); return QueryBuilders.termQuery(translatedFieldName, query.value()).caseInsensitive(query.caseInsensitive()); } else if (qb instanceof final ExistsQueryBuilder query) { final String translatedFieldName = ApiKeyFieldNameTranslators.translate(query.fieldName()); + fieldNameVisitor.accept(translatedFieldName); return QueryBuilders.existsQuery(translatedFieldName); } else if (qb instanceof final TermsQueryBuilder query) { if (query.termsLookup() != null) { throw new IllegalArgumentException("terms query with terms lookup is not supported for API Key query"); } final String 
translatedFieldName = ApiKeyFieldNameTranslators.translate(query.fieldName()); + fieldNameVisitor.accept(translatedFieldName); return QueryBuilders.termsQuery(translatedFieldName, query.getValues()); } else if (qb instanceof final PrefixQueryBuilder query) { final String translatedFieldName = ApiKeyFieldNameTranslators.translate(query.fieldName()); + fieldNameVisitor.accept(translatedFieldName); return QueryBuilders.prefixQuery(translatedFieldName, query.value()).caseInsensitive(query.caseInsensitive()); } else if (qb instanceof final WildcardQueryBuilder query) { final String translatedFieldName = ApiKeyFieldNameTranslators.translate(query.fieldName()); + fieldNameVisitor.accept(translatedFieldName); return QueryBuilders.wildcardQuery(translatedFieldName, query.value()) .caseInsensitive(query.caseInsensitive()) .rewrite(query.rewrite()); } else if (qb instanceof final RangeQueryBuilder query) { - final String translatedFieldName = ApiKeyFieldNameTranslators.translate(query.fieldName()); if (query.relation() != null) { throw new IllegalArgumentException("range query with relation is not supported for API Key query"); } + final String translatedFieldName = ApiKeyFieldNameTranslators.translate(query.fieldName()); + fieldNameVisitor.accept(translatedFieldName); final RangeQueryBuilder newQuery = QueryBuilders.rangeQuery(translatedFieldName); if (query.format() != null) { newQuery.format(query.format()); @@ -159,9 +180,7 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws } static boolean isIndexFieldNameAllowed(String fieldName) { - return ALLOWED_EXACT_INDEX_FIELD_NAMES.contains(fieldName) - || fieldName.startsWith("metadata_flattened.") - || fieldName.startsWith("creator."); + return ALLOWED_EXACT_INDEX_FIELD_NAMES.contains(fieldName) || fieldName.startsWith("metadata_flattened."); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/ApiKeyFieldNameTranslators.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/ApiKeyFieldNameTranslators.java index 4d7cc9d978cd4..c204ec031b18c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/ApiKeyFieldNameTranslators.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/ApiKeyFieldNameTranslators.java @@ -10,6 +10,8 @@ import java.util.List; import java.util.function.Function; +import static org.elasticsearch.xpack.security.action.apikey.TransportQueryApiKeyAction.API_KEY_TYPE_RUNTIME_MAPPING_FIELD; + /** * A class to translate query level field names to index level field names. 
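 * For example, the query-level field {@code username} is translated to the index-level field {@code creator.principal},
 * and {@code type} is translated to the runtime field that backs the API key type (see the translator list below).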
*/ @@ -21,6 +23,7 @@ public class ApiKeyFieldNameTranslators { new ExactFieldNameTranslator(s -> "creator.principal", "username"), new ExactFieldNameTranslator(s -> "creator.realm", "realm_name"), new ExactFieldNameTranslator(Function.identity(), "name"), + new ExactFieldNameTranslator(s -> API_KEY_TYPE_RUNTIME_MAPPING_FIELD, "type"), new ExactFieldNameTranslator(s -> "creation_time", "creation"), new ExactFieldNameTranslator(s -> "expiration_time", "expiration"), new ExactFieldNameTranslator(s -> "api_key_invalidated", "invalidated"), diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilderTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilderTests.java index 477409f22369f..235657a30e11f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilderTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilderTests.java @@ -29,11 +29,13 @@ import org.elasticsearch.xpack.core.security.authc.AuthenticationTests; import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.user.User; +import org.elasticsearch.xpack.security.action.apikey.TransportQueryApiKeyAction; import org.elasticsearch.xpack.security.authc.ApiKeyService; import java.io.IOException; import java.time.Instant; import java.time.temporal.ChronoUnit; +import java.util.ArrayList; import java.util.List; import java.util.function.Predicate; @@ -57,7 +59,9 @@ public class ApiKeyBoolQueryBuilderTests extends ESTestCase { public void testBuildFromSimpleQuery() { final Authentication authentication = randomBoolean() ? AuthenticationTests.randomAuthentication(null, null) : null; final QueryBuilder q1 = randomSimpleQuery("name"); - final ApiKeyBoolQueryBuilder apiKeyQb1 = ApiKeyBoolQueryBuilder.build(q1, authentication); + final List queryFields = new ArrayList<>(); + final ApiKeyBoolQueryBuilder apiKeyQb1 = ApiKeyBoolQueryBuilder.build(q1, queryFields::add, authentication); + assertQueryFields(queryFields, q1, authentication); assertCommonFilterQueries(apiKeyQb1, authentication); final List mustQueries = apiKeyQb1.must(); assertThat(mustQueries, hasSize(1)); @@ -69,7 +73,9 @@ public void testBuildFromSimpleQuery() { public void testQueryForDomainAuthentication() { final Authentication authentication = AuthenticationTests.randomAuthentication(null, AuthenticationTests.randomRealmRef(true)); final QueryBuilder query = randomSimpleQuery("name"); - final ApiKeyBoolQueryBuilder apiKeysQuery = ApiKeyBoolQueryBuilder.build(query, authentication); + final List queryFields = new ArrayList<>(); + final ApiKeyBoolQueryBuilder apiKeysQuery = ApiKeyBoolQueryBuilder.build(query, queryFields::add, authentication); + assertQueryFields(queryFields, query, authentication); assertThat(apiKeysQuery.filter().get(0), is(QueryBuilders.termQuery("doc_type", "api_key"))); assertThat( apiKeysQuery.filter().get(1), @@ -102,18 +108,23 @@ public void testQueryForDomainAuthentication() { public void testBuildFromBoolQuery() { final Authentication authentication = randomBoolean() ? 
AuthenticationTests.randomAuthentication(null, null) : null; + final List queryFields = new ArrayList<>(); final BoolQueryBuilder bq1 = QueryBuilders.boolQuery(); + boolean accessesNameField = false; if (randomBoolean()) { bq1.must(QueryBuilders.prefixQuery("name", "prod-")); + accessesNameField = true; } if (randomBoolean()) { bq1.should(QueryBuilders.wildcardQuery("name", "*-east-*")); + accessesNameField = true; } if (randomBoolean()) { bq1.filter( QueryBuilders.termsQuery("name", randomArray(3, 8, String[]::new, () -> "prod-" + randomInt() + "-east-" + randomInt())) ); + accessesNameField = true; } if (randomBoolean()) { bq1.mustNot(QueryBuilders.idsQuery().addIds(randomArray(1, 3, String[]::new, () -> randomAlphaOfLength(22)))); @@ -121,9 +132,18 @@ public void testBuildFromBoolQuery() { if (randomBoolean()) { bq1.minimumShouldMatch(randomIntBetween(1, 2)); } - final ApiKeyBoolQueryBuilder apiKeyQb1 = ApiKeyBoolQueryBuilder.build(bq1, authentication); + final ApiKeyBoolQueryBuilder apiKeyQb1 = ApiKeyBoolQueryBuilder.build(bq1, queryFields::add, authentication); assertCommonFilterQueries(apiKeyQb1, authentication); + assertThat(queryFields, hasItem("doc_type")); + if (accessesNameField) { + assertThat(queryFields, hasItem("name")); + } + if (authentication != null && authentication.isApiKey() == false) { + assertThat(queryFields, hasItem("creator.principal")); + assertThat(queryFields, hasItem("creator.realm")); + } + assertThat(apiKeyQb1.must(), hasSize(1)); assertThat(apiKeyQb1.should(), empty()); assertThat(apiKeyQb1.mustNot(), empty()); @@ -141,35 +161,78 @@ public void testFieldNameTranslation() { final Authentication authentication = randomBoolean() ? AuthenticationTests.randomAuthentication(null, null) : null; // metadata - final String metadataKey = randomAlphaOfLengthBetween(3, 8); - final TermQueryBuilder q1 = QueryBuilders.termQuery("metadata." + metadataKey, randomAlphaOfLengthBetween(3, 8)); - final ApiKeyBoolQueryBuilder apiKeyQb1 = ApiKeyBoolQueryBuilder.build(q1, authentication); - assertCommonFilterQueries(apiKeyQb1, authentication); - assertThat(apiKeyQb1.must().get(0), equalTo(QueryBuilders.termQuery("metadata_flattened." + metadataKey, q1.value()))); + { + final List queryFields = new ArrayList<>(); + final String metadataKey = randomAlphaOfLengthBetween(3, 8); + final TermQueryBuilder q1 = QueryBuilders.termQuery("metadata." + metadataKey, randomAlphaOfLengthBetween(3, 8)); + final ApiKeyBoolQueryBuilder apiKeyQb1 = ApiKeyBoolQueryBuilder.build(q1, queryFields::add, authentication); + assertThat(queryFields, hasItem("doc_type")); + assertThat(queryFields, hasItem("metadata_flattened." + metadataKey)); + if (authentication != null && authentication.isApiKey() == false) { + assertThat(queryFields, hasItem("creator.principal")); + assertThat(queryFields, hasItem("creator.realm")); + } + assertCommonFilterQueries(apiKeyQb1, authentication); + assertThat(apiKeyQb1.must().get(0), equalTo(QueryBuilders.termQuery("metadata_flattened." 
+ metadataKey, q1.value()))); + } // username - final PrefixQueryBuilder q2 = QueryBuilders.prefixQuery("username", randomAlphaOfLength(3)); - final ApiKeyBoolQueryBuilder apiKeyQb2 = ApiKeyBoolQueryBuilder.build(q2, authentication); - assertCommonFilterQueries(apiKeyQb2, authentication); - assertThat(apiKeyQb2.must().get(0), equalTo(QueryBuilders.prefixQuery("creator.principal", q2.value()))); + { + final List queryFields = new ArrayList<>(); + final PrefixQueryBuilder q2 = QueryBuilders.prefixQuery("username", randomAlphaOfLength(3)); + final ApiKeyBoolQueryBuilder apiKeyQb2 = ApiKeyBoolQueryBuilder.build(q2, queryFields::add, authentication); + assertThat(queryFields, hasItem("doc_type")); + assertThat(queryFields, hasItem("creator.principal")); + if (authentication != null && authentication.isApiKey() == false) { + assertThat(queryFields, hasItem("creator.realm")); + } + assertCommonFilterQueries(apiKeyQb2, authentication); + assertThat(apiKeyQb2.must().get(0), equalTo(QueryBuilders.prefixQuery("creator.principal", q2.value()))); + } // realm name - final WildcardQueryBuilder q3 = QueryBuilders.wildcardQuery("realm_name", "*" + randomAlphaOfLength(3)); - final ApiKeyBoolQueryBuilder apiKeyQb3 = ApiKeyBoolQueryBuilder.build(q3, authentication); - assertCommonFilterQueries(apiKeyQb3, authentication); - assertThat(apiKeyQb3.must().get(0), equalTo(QueryBuilders.wildcardQuery("creator.realm", q3.value()))); + { + final List queryFields = new ArrayList<>(); + final WildcardQueryBuilder q3 = QueryBuilders.wildcardQuery("realm_name", "*" + randomAlphaOfLength(3)); + final ApiKeyBoolQueryBuilder apiKeyQb3 = ApiKeyBoolQueryBuilder.build(q3, queryFields::add, authentication); + assertThat(queryFields, hasItem("doc_type")); + assertThat(queryFields, hasItem("creator.realm")); + if (authentication != null && authentication.isApiKey() == false) { + assertThat(queryFields, hasItem("creator.principal")); + } + assertCommonFilterQueries(apiKeyQb3, authentication); + assertThat(apiKeyQb3.must().get(0), equalTo(QueryBuilders.wildcardQuery("creator.realm", q3.value()))); + } // creation_time - final TermQueryBuilder q4 = QueryBuilders.termQuery("creation", randomLongBetween(0, Long.MAX_VALUE)); - final ApiKeyBoolQueryBuilder apiKeyQb4 = ApiKeyBoolQueryBuilder.build(q4, authentication); - assertCommonFilterQueries(apiKeyQb4, authentication); - assertThat(apiKeyQb4.must().get(0), equalTo(QueryBuilders.termQuery("creation_time", q4.value()))); + { + final List queryFields = new ArrayList<>(); + final TermQueryBuilder q4 = QueryBuilders.termQuery("creation", randomLongBetween(0, Long.MAX_VALUE)); + final ApiKeyBoolQueryBuilder apiKeyQb4 = ApiKeyBoolQueryBuilder.build(q4, queryFields::add, authentication); + assertThat(queryFields, hasItem("doc_type")); + assertThat(queryFields, hasItem("creation_time")); + if (authentication != null && authentication.isApiKey() == false) { + assertThat(queryFields, hasItem("creator.principal")); + assertThat(queryFields, hasItem("creator.realm")); + } + assertCommonFilterQueries(apiKeyQb4, authentication); + assertThat(apiKeyQb4.must().get(0), equalTo(QueryBuilders.termQuery("creation_time", q4.value()))); + } // expiration_time - final TermQueryBuilder q5 = QueryBuilders.termQuery("expiration", randomLongBetween(0, Long.MAX_VALUE)); - final ApiKeyBoolQueryBuilder apiKeyQb5 = ApiKeyBoolQueryBuilder.build(q5, authentication); - assertCommonFilterQueries(apiKeyQb5, authentication); - assertThat(apiKeyQb5.must().get(0), equalTo(QueryBuilders.termQuery("expiration_time", 
q5.value()))); + { + final List queryFields = new ArrayList<>(); + final TermQueryBuilder q5 = QueryBuilders.termQuery("expiration", randomLongBetween(0, Long.MAX_VALUE)); + final ApiKeyBoolQueryBuilder apiKeyQb5 = ApiKeyBoolQueryBuilder.build(q5, queryFields::add, authentication); + assertThat(queryFields, hasItem("doc_type")); + assertThat(queryFields, hasItem("expiration_time")); + if (authentication != null && authentication.isApiKey() == false) { + assertThat(queryFields, hasItem("creator.principal")); + assertThat(queryFields, hasItem("creator.realm")); + } + assertCommonFilterQueries(apiKeyQb5, authentication); + assertThat(apiKeyQb5.must().get(0), equalTo(QueryBuilders.termQuery("expiration_time", q5.value()))); + } } public void testAllowListOfFieldNames() { @@ -197,7 +260,7 @@ public void testAllowListOfFieldNames() { ); final IllegalArgumentException e1 = expectThrows( IllegalArgumentException.class, - () -> ApiKeyBoolQueryBuilder.build(q1, authentication) + () -> ApiKeyBoolQueryBuilder.build(q1, ignored -> {}, authentication) ); assertThat(e1.getMessage(), containsString("Field [" + fieldName + "] is not allowed for API Key query")); @@ -208,7 +271,7 @@ public void testTermsLookupIsNotAllowed() { final TermsQueryBuilder q1 = QueryBuilders.termsLookupQuery("name", new TermsLookup("lookup", "1", "names")); final IllegalArgumentException e1 = expectThrows( IllegalArgumentException.class, - () -> ApiKeyBoolQueryBuilder.build(q1, authentication) + () -> ApiKeyBoolQueryBuilder.build(q1, ignored -> {}, authentication) ); assertThat(e1.getMessage(), containsString("terms query with terms lookup is not supported for API Key query")); } @@ -218,7 +281,7 @@ public void testRangeQueryWithRelationIsNotAllowed() { final RangeQueryBuilder q1 = QueryBuilders.rangeQuery("creation").relation("contains"); final IllegalArgumentException e1 = expectThrows( IllegalArgumentException.class, - () -> ApiKeyBoolQueryBuilder.build(q1, authentication) + () -> ApiKeyBoolQueryBuilder.build(q1, ignored -> {}, authentication) ); assertThat(e1.getMessage(), containsString("range query with relation is not supported for API Key query")); } @@ -266,7 +329,7 @@ public void testDisallowedQueryTypes() { final IllegalArgumentException e1 = expectThrows( IllegalArgumentException.class, - () -> ApiKeyBoolQueryBuilder.build(q1, authentication) + () -> ApiKeyBoolQueryBuilder.build(q1, ignored -> {}, authentication) ); assertThat(e1.getMessage(), containsString("Query type [" + q1.getName() + "] is not supported for API Key query")); } @@ -274,6 +337,7 @@ public void testDisallowedQueryTypes() { public void testWillSetAllowedFields() throws IOException { final ApiKeyBoolQueryBuilder apiKeyQb1 = ApiKeyBoolQueryBuilder.build( randomSimpleQuery("name"), + ignored -> {}, randomBoolean() ? 
AuthenticationTests.randomAuthentication(null, null) : null ); @@ -305,7 +369,11 @@ public void testWillFilterForApiKeyId() { new User(randomAlphaOfLengthBetween(5, 8)), apiKeyId ); - final ApiKeyBoolQueryBuilder apiKeyQb = ApiKeyBoolQueryBuilder.build(randomFrom(randomSimpleQuery("name"), null), authentication); + final ApiKeyBoolQueryBuilder apiKeyQb = ApiKeyBoolQueryBuilder.build( + randomFrom(randomSimpleQuery("name"), null), + ignored -> {}, + authentication + ); assertThat(apiKeyQb.filter(), hasItem(QueryBuilders.termQuery("doc_type", "api_key"))); assertThat(apiKeyQb.filter(), hasItem(QueryBuilders.idsQuery().addIds(apiKeyId))); } @@ -314,11 +382,14 @@ private void testAllowedIndexFieldName(Predicate predicate) { final String allowedField = randomFrom( "doc_type", "name", + "type", + TransportQueryApiKeyAction.API_KEY_TYPE_RUNTIME_MAPPING_FIELD, "api_key_invalidated", "creation_time", "expiration_time", "metadata_flattened." + randomAlphaOfLengthBetween(1, 10), - "creator." + randomAlphaOfLengthBetween(1, 10) + "creator.principal", + "creator.realm" ); assertThat(predicate, trueWith(allowedField)); @@ -362,4 +433,15 @@ private QueryBuilder randomSimpleQuery(String name) { .to(Instant.now().toEpochMilli(), randomBoolean()); }; } + + private void assertQueryFields(List actualQueryFields, QueryBuilder queryBuilder, Authentication authentication) { + assertThat(actualQueryFields, hasItem("doc_type")); + if ((queryBuilder instanceof IdsQueryBuilder || queryBuilder instanceof MatchAllQueryBuilder) == false) { + assertThat(actualQueryFields, hasItem("name")); + } + if (authentication != null && authentication.isApiKey() == false) { + assertThat(actualQueryFields, hasItem("creator.principal")); + assertThat(actualQueryFields, hasItem("creator.realm")); + } + } } diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java index 1a37f31bffe79..2bce06543f67c 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java @@ -12,6 +12,7 @@ import org.elasticsearch.Build; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; +import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; @@ -39,19 +40,53 @@ import java.util.Locale; import java.util.Map; import java.util.Set; +import java.util.function.Consumer; import static org.elasticsearch.transport.RemoteClusterPortSettings.TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItems; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; public class ApiKeyBackwardsCompatibilityIT extends AbstractUpgradeTestCase { + private static final Version UPGRADE_FROM_VERSION = Version.fromString(System.getProperty("tests.upgrade_from_version")); + private RestClient oldVersionClient = null; private RestClient newVersionClient = null; + public void testQueryRestTypeKeys() throws IOException { + assumeTrue( + "only API keys created pre-8.9 are relevant for the rest-type query bwc case", + 
UPGRADE_FROM_VERSION.before(Version.V_8_9_0) + ); + switch (CLUSTER_TYPE) { + case OLD -> createOrGrantApiKey(client(), "query-test-rest-key-from-old-cluster", "{}"); + case MIXED -> createOrGrantApiKey(client(), "query-test-rest-key-from-mixed-cluster", "{}"); + case UPGRADED -> { + createOrGrantApiKey(client(), "query-test-rest-key-from-upgraded-cluster", "{}"); + for (String query : List.of(""" + {"query": {"term": {"type": "rest" }}}""", """ + {"query": {"prefix": {"type": "re" }}}""", """ + {"query": {"wildcard": {"type": "r*t" }}}""", """ + {"query": {"range": {"type": {"gte": "raaa", "lte": "rzzz"}}}}""")) { + assertQuery(client(), query, apiKeys -> { + assertThat( + apiKeys.stream().map(k -> (String) k.get("name")).toList(), + hasItems( + "query-test-rest-key-from-old-cluster", + "query-test-rest-key-from-mixed-cluster", + "query-test-rest-key-from-upgraded-cluster" + ) + ); + }); + } + } + } + } + public void testCreatingAndUpdatingApiKeys() throws Exception { assumeTrue( "The remote_indices for API Keys are not supported before transport version " @@ -177,7 +212,10 @@ private Tuple createOrGrantApiKey(String roles) throws IOExcepti } private Tuple createOrGrantApiKey(RestClient client, String roles) throws IOException { - final String name = "test-api-key-" + randomAlphaOfLengthBetween(3, 5); + return createOrGrantApiKey(client, "test-api-key-" + randomAlphaOfLengthBetween(3, 5), roles); + } + + private Tuple createOrGrantApiKey(RestClient client, String name, String roles) throws IOException { final Request createApiKeyRequest; String body = Strings.format(""" { @@ -391,4 +429,15 @@ private static RoleDescriptor randomRoleDescriptor(boolean includeRemoteIndices) null ); } + + private void assertQuery(RestClient restClient, String body, Consumer>> apiKeysVerifier) throws IOException { + final Request request = new Request("GET", "/_security/_query/api_key"); + request.setJsonEntity(body); + final Response response = restClient.performRequest(request); + assertOK(response); + final Map responseMap = responseAsMap(response); + @SuppressWarnings("unchecked") + final List> apiKeys = (List>) responseMap.get("api_keys"); + apiKeysVerifier.accept(apiKeys); + } } From 06548d09aa054f77e350a0612f44c3fd39c7a084 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Thu, 11 Jan 2024 09:58:20 +0100 Subject: [PATCH 31/75] Fix TextFieldMapperTests (#104192) --- .../index/mapper/KeywordFieldMapperTests.java | 17 ++++-- .../index/mapper/TextFieldMapperTests.java | 61 ++++++++----------- .../index/mapper/MapperTestCase.java | 34 ++++------- 3 files changed, 48 insertions(+), 64 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java index 983054df2fbe7..b0d1da2b4f011 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java @@ -684,10 +684,14 @@ static class KeywordSyntheticSourceSupport implements SyntheticSourceSupport { @Override public SyntheticSourceExample example(int maxValues) { + return example(maxValues, false); + } + + public SyntheticSourceExample example(int maxValues, boolean loadBlockFromSource) { if (randomBoolean()) { Tuple v = generateValue(); Object loadBlock = v.v2(); - if (ignoreAbove != null && v.v2().length() > ignoreAbove) { + if (loadBlockFromSource == false && ignoreAbove != null && v.v2().length() > ignoreAbove) { 
loadBlock = null; } return new SyntheticSourceExample(v.v1(), v.v2(), loadBlock, this::mapping); @@ -704,9 +708,14 @@ public SyntheticSourceExample example(int maxValues) { } }); List outList = store ? outPrimary : new HashSet<>(outPrimary).stream().sorted().collect(Collectors.toList()); - List loadBlock = docValues - ? new HashSet<>(outPrimary).stream().sorted().collect(Collectors.toList()) - : List.copyOf(outList); + List loadBlock; + if (loadBlockFromSource) { + loadBlock = in; + } else if (docValues) { + loadBlock = new HashSet<>(outPrimary).stream().sorted().collect(Collectors.toList()); + } else { + loadBlock = List.copyOf(outList); + } Object loadBlockResult = loadBlock.size() == 1 ? loadBlock.get(0) : loadBlock; outList.addAll(outExtraValues); Object out = outList.size() == 1 ? outList.get(0) : outList; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index 639b66b260469..96a3f8ffb02e8 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -97,30 +97,6 @@ protected Object getSampleValueForDocument() { return "value"; } - @Override - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104152") - public void testBlockLoaderFromColumnReader() throws IOException { - super.testBlockLoaderFromColumnReader(); - } - - @Override - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104152") - public void testBlockLoaderFromRowStrideReader() throws IOException { - super.testBlockLoaderFromRowStrideReader(); - } - - @Override - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104152") - public void testBlockLoaderFromColumnReaderWithSyntheticSource() throws IOException { - super.testBlockLoaderFromColumnReaderWithSyntheticSource(); - } - - @Override - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104152") - public void testBlockLoaderFromRowStrideReaderWithSyntheticSource() throws IOException { - super.testBlockLoaderFromRowStrideReaderWithSyntheticSource(); - } - public final void testExistsQueryIndexDisabled() throws IOException { MapperService mapperService = createMapperService(fieldMapping(b -> { minimalMapping(b); @@ -1145,8 +1121,9 @@ protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed) boolean storeTextField = randomBoolean(); boolean storedKeywordField = storeTextField || randomBoolean(); String nullValue = storeTextField || usually() ? null : randomAlphaOfLength(2); + Integer ignoreAbove = randomBoolean() ? null : between(10, 100); KeywordFieldMapperTests.KeywordSyntheticSourceSupport keywordSupport = new KeywordFieldMapperTests.KeywordSyntheticSourceSupport( - randomBoolean() ? 
null : between(10, 100), + ignoreAbove, storedKeywordField, nullValue, false == storeTextField @@ -1154,25 +1131,33 @@ protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed) return new SyntheticSourceSupport() { @Override public SyntheticSourceExample example(int maxValues) { - SyntheticSourceExample delegate = keywordSupport.example(maxValues); if (storeTextField) { + SyntheticSourceExample delegate = keywordSupport.example(maxValues, true); return new SyntheticSourceExample( delegate.inputValue(), - delegate.result(), - delegate.result(), + delegate.expectedForSyntheticSource(), + delegate.expectedForBlockLoader(), b -> b.field("type", "text").field("store", true) ); } - return new SyntheticSourceExample(delegate.inputValue(), delegate.result(), delegate.blockLoaderResult(), b -> { - b.field("type", "text"); - b.startObject("fields"); - { - b.startObject(randomAlphaOfLength(4)); - delegate.mapping().accept(b); + // We'll load from _source if ignore_above is defined, otherwise we load from the keyword field. + boolean loadingFromSource = ignoreAbove != null; + SyntheticSourceExample delegate = keywordSupport.example(maxValues, loadingFromSource); + return new SyntheticSourceExample( + delegate.inputValue(), + delegate.expectedForSyntheticSource(), + delegate.expectedForBlockLoader(), + b -> { + b.field("type", "text"); + b.startObject("fields"); + { + b.startObject(randomAlphaOfLength(4)); + delegate.mapping().accept(b); + b.endObject(); + } b.endObject(); } - b.endObject(); - }); + ); } @Override @@ -1371,7 +1356,9 @@ protected boolean supportsColumnAtATimeReader(MapperService mapper, MappedFieldT String parentName = mapper.mappingLookup().parentField(ft.name()); if (parentName == null) { TextFieldMapper.TextFieldType text = (TextFieldType) ft; - return text.syntheticSourceDelegate() != null && text.syntheticSourceDelegate().hasDocValues(); + return text.syntheticSourceDelegate() != null + && text.syntheticSourceDelegate().hasDocValues() + && text.canUseSyntheticSourceDelegateForQuerying(); } MappedFieldType parent = mapper.fieldType(parentName); if (false == parent.typeName().equals(KeywordFieldMapper.CONTENT_TYPE)) { diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java index f5ca39899ea65..4a8e6eb1b758e 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java @@ -1035,8 +1035,8 @@ protected String minimalIsInvalidRoutingPathErrorMessage(Mapper mapper) { public record SyntheticSourceExample( CheckedConsumer inputValue, - CheckedConsumer result, - CheckedConsumer blockLoaderResult, + CheckedConsumer expectedForSyntheticSource, + CheckedConsumer expectedForBlockLoader, CheckedConsumer mapping ) { public SyntheticSourceExample(Object inputValue, Object result, CheckedConsumer mapping) { @@ -1063,22 +1063,15 @@ private void buildInput(XContentBuilder b) throws IOException { private String expected() throws IOException { XContentBuilder b = JsonXContent.contentBuilder().startObject().field("field"); - result.accept(b); + expectedForSyntheticSource.accept(b); return Strings.toString(b.endObject()); } - private Object expectedParsed() throws IOException { - return XContentHelper.convertToMap(JsonXContent.jsonXContent, expected(), false).get("field"); - } - - private String expectedBlockLoader() throws IOException { + private 
Object expectedParsedForBlockLoader() throws IOException { XContentBuilder b = JsonXContent.contentBuilder().startObject().field("field"); - blockLoaderResult.accept(b); - return Strings.toString(b.endObject()); - } - - private Object expectedParsedBlockLoader() throws IOException { - return XContentHelper.convertToMap(JsonXContent.jsonXContent, expectedBlockLoader(), false).get("field"); + expectedForBlockLoader.accept(b); + String str = Strings.toString(b.endObject()); + return XContentHelper.convertToMap(JsonXContent.jsonXContent, str, false).get("field"); } } @@ -1239,23 +1232,19 @@ public final void testSyntheticEmptyListNoDocValuesLoader() throws IOException { assertNoDocValueLoader(b -> b.startArray("field").endArray()); } - // TextFieldMapperTests @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104152") - public void testBlockLoaderFromColumnReader() throws IOException { + public final void testBlockLoaderFromColumnReader() throws IOException { testBlockLoader(false, true); } - // TextFieldMapperTests @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104152") - public void testBlockLoaderFromRowStrideReader() throws IOException { + public final void testBlockLoaderFromRowStrideReader() throws IOException { testBlockLoader(false, false); } - // TextFieldMapperTests @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104152") - public void testBlockLoaderFromColumnReaderWithSyntheticSource() throws IOException { + public final void testBlockLoaderFromColumnReaderWithSyntheticSource() throws IOException { testBlockLoader(true, true); } // Removed 'final' to silence this test in GeoPointFieldMapperTests, which does not support synthetic source completely - // TextFieldMapperTests @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104152") public void testBlockLoaderFromRowStrideReaderWithSyntheticSource() throws IOException { testBlockLoader(true, false); } @@ -1343,8 +1332,7 @@ public FieldNamesFieldMapper.FieldNamesFieldType fieldNames() { inBlock = valuesConvert.apply(inBlock); } } - // If we're reading from _source we expect the order to be preserved, otherwise it's jumbled. - Object expected = loader instanceof BlockSourceReader ? 
example.expectedParsed() : example.expectedParsedBlockLoader(); + Object expected = example.expectedParsedForBlockLoader(); if (List.of().equals(expected)) { assertThat(inBlock, nullValue()); return; From 3145e9f9fc425295ca1a5ee5bc71a8e78afee7b1 Mon Sep 17 00:00:00 2001 From: Marco Liberati Date: Thu, 11 Jan 2024 10:15:30 +0100 Subject: [PATCH 32/75] [ES|QL] Annotate all ESQL functions (for SHOW FUNCTIONS) (#103686) * :wrench: Initial annotations * :pencil2: Add substring doc * :label: Fix imports * :pencil2: Add annotations for string fns * :recycle: Fix issues * :rotatin_light: Fix linting issues * :pencil2: Add more annotations * :rotatin_light: Fix linting issues * :recycle: Add missing import * :pencil2: Add more annotations * :rotating_light: Fix linting * :white_check_mark: Fix many tests for new annotations * :white_check_mark: Fix more tests * :bug: Fix missing flag * :sparkles: Annotate new functions * :white_check_mark: fix more tests * :white_check_mark: Fix signature issue * :white_check_mark: Make all csv tests pass * :white_check_mark: Fix remaining tests * :sparkles: New assets from annotations * :white_check_mark: Refactor test * :white_check_mark: Fix updated signature --- .../esql/functions/signature/coalesce.svg | 2 +- .../esql/functions/signature/concat.svg | 2 +- .../esql/functions/signature/date_extract.svg | 2 +- .../esql/functions/signature/ends_with.svg | 2 +- .../esql/functions/signature/left.svg | 2 +- .../esql/functions/signature/length.svg | 2 +- .../esql/functions/signature/mv_avg.svg | 2 +- .../esql/functions/signature/mv_median.svg | 2 +- .../esql/functions/signature/mv_sum.svg | 2 +- .../esql/functions/signature/replace.svg | 2 +- .../esql/functions/signature/right.svg | 2 +- .../esql/functions/signature/round.svg | 2 +- .../esql/functions/signature/split.svg | 2 +- .../esql/functions/signature/starts_with.svg | 2 +- .../esql/functions/signature/substring.svg | 2 +- .../esql/functions/types/case.asciidoc | 2 +- .../esql/functions/types/coalesce.asciidoc | 2 +- .../esql/functions/types/concat.asciidoc | 2 +- .../functions/types/date_extract.asciidoc | 2 +- .../esql/functions/types/ends_with.asciidoc | 3 +- .../esql/functions/types/is_finite.asciidoc | 5 - .../esql/functions/types/is_infinite.asciidoc | 5 - .../esql/functions/types/is_nan.asciidoc | 5 - .../esql/functions/types/left.asciidoc | 3 +- .../esql/functions/types/length.asciidoc | 3 +- .../esql/functions/types/mv_avg.asciidoc | 2 +- .../esql/functions/types/mv_median.asciidoc | 2 +- .../esql/functions/types/mv_sum.asciidoc | 2 +- .../esql/functions/types/replace.asciidoc | 2 +- .../esql/functions/types/right.asciidoc | 3 +- .../esql/functions/types/round.asciidoc | 2 +- .../esql/functions/types/split.asciidoc | 3 +- .../esql/functions/types/starts_with.asciidoc | 3 +- .../esql/functions/types/substring.asciidoc | 3 +- .../src/main/resources/show.csv-spec | 296 +++++++++--------- .../xpack/esql/action/EsqlActionIT.java | 3 +- .../function/EsqlFunctionRegistry.java | 18 +- .../expression/function/FunctionInfo.java | 2 + .../expression/function/aggregate/Avg.java | 5 +- .../expression/function/aggregate/Count.java | 25 +- .../function/aggregate/CountDistinct.java | 25 +- .../expression/function/aggregate/Max.java | 9 +- .../expression/function/aggregate/Median.java | 9 +- .../aggregate/MedianAbsoluteDeviation.java | 12 +- .../expression/function/aggregate/Min.java | 9 +- .../function/aggregate/Percentile.java | 13 +- .../expression/function/aggregate/Sum.java | 5 +- 
.../function/scalar/conditional/Case.java | 41 ++- .../function/scalar/conditional/Greatest.java | 5 + .../function/scalar/conditional/Least.java | 5 + .../function/scalar/convert/ToBoolean.java | 2 +- .../scalar/convert/ToCartesianPoint.java | 2 +- .../function/scalar/convert/ToDatetime.java | 2 +- .../function/scalar/convert/ToDegrees.java | 4 +- .../function/scalar/convert/ToDouble.java | 2 +- .../function/scalar/convert/ToGeoPoint.java | 2 +- .../function/scalar/convert/ToIP.java | 2 +- .../function/scalar/convert/ToInteger.java | 2 +- .../function/scalar/convert/ToLong.java | 2 +- .../function/scalar/convert/ToRadians.java | 4 +- .../function/scalar/convert/ToString.java | 16 +- .../scalar/convert/ToUnsignedLong.java | 2 +- .../function/scalar/convert/ToVersion.java | 2 +- .../function/scalar/date/DateExtract.java | 18 +- .../function/scalar/date/DateFormat.java | 16 +- .../function/scalar/date/DateTrunc.java | 15 +- .../expression/function/scalar/date/Now.java | 2 + .../function/scalar/ip/CIDRMatch.java | 9 +- .../expression/function/scalar/math/Abs.java | 4 +- .../expression/function/scalar/math/Acos.java | 4 +- .../expression/function/scalar/math/Asin.java | 4 +- .../expression/function/scalar/math/Atan.java | 4 +- .../function/scalar/math/Atan2.java | 9 +- .../function/scalar/math/AutoBucket.java | 4 +- .../expression/function/scalar/math/Ceil.java | 4 +- .../expression/function/scalar/math/Cos.java | 2 +- .../expression/function/scalar/math/Cosh.java | 2 +- .../expression/function/scalar/math/E.java | 2 + .../function/scalar/math/Floor.java | 7 +- .../function/scalar/math/Log10.java | 4 +- .../expression/function/scalar/math/Pi.java | 3 + .../expression/function/scalar/math/Pow.java | 6 +- .../function/scalar/math/Round.java | 15 +- .../expression/function/scalar/math/Sin.java | 2 +- .../expression/function/scalar/math/Sinh.java | 2 +- .../expression/function/scalar/math/Sqrt.java | 4 +- .../expression/function/scalar/math/Tan.java | 2 +- .../expression/function/scalar/math/Tanh.java | 2 +- .../expression/function/scalar/math/Tau.java | 2 + .../function/scalar/multivalue/MvAvg.java | 8 +- .../function/scalar/multivalue/MvCount.java | 14 +- .../function/scalar/multivalue/MvDedupe.java | 11 +- .../function/scalar/multivalue/MvFirst.java | 31 +- .../function/scalar/multivalue/MvLast.java | 31 +- .../function/scalar/multivalue/MvMax.java | 7 +- .../function/scalar/multivalue/MvMedian.java | 8 +- .../function/scalar/multivalue/MvMin.java | 7 +- .../function/scalar/multivalue/MvSum.java | 8 +- .../function/scalar/nulls/Coalesce.java | 20 +- .../function/scalar/string/Concat.java | 9 +- .../function/scalar/string/EndsWith.java | 12 +- .../function/scalar/string/Left.java | 7 +- .../function/scalar/string/Length.java | 5 +- .../function/scalar/string/Replace.java | 13 +- .../function/scalar/string/Right.java | 7 +- .../function/scalar/string/Split.java | 9 +- .../function/scalar/string/StartsWith.java | 12 +- .../function/scalar/string/Substring.java | 13 +- .../esql/plan/logical/show/ShowFunctions.java | 3 +- .../scalar/multivalue/MvSumTests.java | 1 - .../function/scalar/string/EndsWithTests.java | 15 + .../function/scalar/string/LeftTests.java | 13 + .../function/scalar/string/LengthTests.java | 48 ++- .../function/scalar/string/RightTests.java | 14 +- .../function/scalar/string/SplitTests.java | 16 + .../scalar/string/StartsWithTests.java | 15 + .../scalar/string/SubstringTests.java | 14 + 117 files changed, 801 insertions(+), 328 deletions(-) delete mode 100644 
docs/reference/esql/functions/types/is_finite.asciidoc delete mode 100644 docs/reference/esql/functions/types/is_infinite.asciidoc delete mode 100644 docs/reference/esql/functions/types/is_nan.asciidoc diff --git a/docs/reference/esql/functions/signature/coalesce.svg b/docs/reference/esql/functions/signature/coalesce.svg index bfe80812327a9..22a70efead49c 100644 --- a/docs/reference/esql/functions/signature/coalesce.svg +++ b/docs/reference/esql/functions/signature/coalesce.svg @@ -1 +1 @@ -COALESCE(arg1,arg2) \ No newline at end of file +COALESCE(expression,expressionX) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/concat.svg b/docs/reference/esql/functions/signature/concat.svg index 1ca5a9bc2d06f..3ad2ae37b11c3 100644 --- a/docs/reference/esql/functions/signature/concat.svg +++ b/docs/reference/esql/functions/signature/concat.svg @@ -1 +1 @@ -CONCAT(arg1,arg2) \ No newline at end of file +CONCAT(first,rest) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/date_extract.svg b/docs/reference/esql/functions/signature/date_extract.svg index ec69633c02e8b..397cdd400d88c 100644 --- a/docs/reference/esql/functions/signature/date_extract.svg +++ b/docs/reference/esql/functions/signature/date_extract.svg @@ -1 +1 @@ -DATE_EXTRACT(arg1,arg2) \ No newline at end of file +DATE_EXTRACT(date_part,field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/ends_with.svg b/docs/reference/esql/functions/signature/ends_with.svg index bf2cb47ed0be0..575452e1bb8c6 100644 --- a/docs/reference/esql/functions/signature/ends_with.svg +++ b/docs/reference/esql/functions/signature/ends_with.svg @@ -1 +1 @@ -ENDS_WITH(arg1,arg2) \ No newline at end of file +ENDS_WITH(str,suffix) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/left.svg b/docs/reference/esql/functions/signature/left.svg index ec14bf8c72131..75704982af004 100644 --- a/docs/reference/esql/functions/signature/left.svg +++ b/docs/reference/esql/functions/signature/left.svg @@ -1 +1 @@ -LEFT(string,length) \ No newline at end of file +LEFT(str,length) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/length.svg b/docs/reference/esql/functions/signature/length.svg index 65c3f4a9db89a..d199f1a9a0170 100644 --- a/docs/reference/esql/functions/signature/length.svg +++ b/docs/reference/esql/functions/signature/length.svg @@ -1 +1 @@ -LENGTH(arg1) \ No newline at end of file +LENGTH(str) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_avg.svg b/docs/reference/esql/functions/signature/mv_avg.svg index 4fc02033e4fdb..4c2371eac0b44 100644 --- a/docs/reference/esql/functions/signature/mv_avg.svg +++ b/docs/reference/esql/functions/signature/mv_avg.svg @@ -1 +1 @@ -MV_AVG(arg1) \ No newline at end of file +MV_AVG(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_median.svg b/docs/reference/esql/functions/signature/mv_median.svg index 3fd5dd009b143..b287fde6dd97e 100644 --- a/docs/reference/esql/functions/signature/mv_median.svg +++ b/docs/reference/esql/functions/signature/mv_median.svg @@ -1 +1 @@ -MV_MEDIAN(arg1) \ No newline at end of file +MV_MEDIAN(v) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_sum.svg b/docs/reference/esql/functions/signature/mv_sum.svg index ff0fd374025ac..3e3fbd30355b1 100644 --- a/docs/reference/esql/functions/signature/mv_sum.svg +++ 
b/docs/reference/esql/functions/signature/mv_sum.svg @@ -1 +1 @@ -MV_SUM(arg1) \ No newline at end of file +MV_SUM(v) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/replace.svg b/docs/reference/esql/functions/signature/replace.svg index 7c86c00d019cb..bbcd11bcc0ab6 100644 --- a/docs/reference/esql/functions/signature/replace.svg +++ b/docs/reference/esql/functions/signature/replace.svg @@ -1 +1 @@ -REPLACE(arg1,arg2,arg3) \ No newline at end of file +REPLACE(str,regex,newStr) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/right.svg b/docs/reference/esql/functions/signature/right.svg index 0afa5dbf01f16..969a6c9442479 100644 --- a/docs/reference/esql/functions/signature/right.svg +++ b/docs/reference/esql/functions/signature/right.svg @@ -1 +1 @@ -RIGHT(string,length) \ No newline at end of file +RIGHT(str,length) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/round.svg b/docs/reference/esql/functions/signature/round.svg index 42a2da87527d9..9da0b9d11213e 100644 --- a/docs/reference/esql/functions/signature/round.svg +++ b/docs/reference/esql/functions/signature/round.svg @@ -1 +1 @@ -ROUND(arg1,arg2) \ No newline at end of file +ROUND(value,decimals) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/split.svg b/docs/reference/esql/functions/signature/split.svg index cd31aef97f8a5..1213f6041b0c4 100644 --- a/docs/reference/esql/functions/signature/split.svg +++ b/docs/reference/esql/functions/signature/split.svg @@ -1 +1 @@ -SPLIT(arg1,arg2) \ No newline at end of file +SPLIT(str,delim) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/starts_with.svg b/docs/reference/esql/functions/signature/starts_with.svg index 0a2d3a1ef0c36..85fcbfa766ae4 100644 --- a/docs/reference/esql/functions/signature/starts_with.svg +++ b/docs/reference/esql/functions/signature/starts_with.svg @@ -1 +1 @@ -STARTS_WITH(arg1,arg2) \ No newline at end of file +STARTS_WITH(str,prefix) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/substring.svg b/docs/reference/esql/functions/signature/substring.svg index 6df5da30f67ed..1f9f6e8c3afa0 100644 --- a/docs/reference/esql/functions/signature/substring.svg +++ b/docs/reference/esql/functions/signature/substring.svg @@ -1 +1 @@ -SUBSTRING(arg1,arg2,arg3) \ No newline at end of file +SUBSTRING(str,start,length) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/case.asciidoc b/docs/reference/esql/functions/types/case.asciidoc index 7062d7000115a..3bf3d8ad3d713 100644 --- a/docs/reference/esql/functions/types/case.asciidoc +++ b/docs/reference/esql/functions/types/case.asciidoc @@ -1,5 +1,5 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | arg2 | result +condition | rest | result |=== diff --git a/docs/reference/esql/functions/types/coalesce.asciidoc b/docs/reference/esql/functions/types/coalesce.asciidoc index e36316ab87bb5..2daf6126d6fb0 100644 --- a/docs/reference/esql/functions/types/coalesce.asciidoc +++ b/docs/reference/esql/functions/types/coalesce.asciidoc @@ -1,6 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | arg2 | result +expression | expressionX | result boolean | boolean | boolean integer | integer | integer keyword | keyword | keyword diff --git a/docs/reference/esql/functions/types/concat.asciidoc b/docs/reference/esql/functions/types/concat.asciidoc index f422b45f0b34c..1f14abf9c498f 100644 --- 
a/docs/reference/esql/functions/types/concat.asciidoc +++ b/docs/reference/esql/functions/types/concat.asciidoc @@ -1,6 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | arg2 | result +first | rest | result keyword | keyword | keyword text | text | keyword |=== diff --git a/docs/reference/esql/functions/types/date_extract.asciidoc b/docs/reference/esql/functions/types/date_extract.asciidoc index 9963c85b2af85..edd244548fb18 100644 --- a/docs/reference/esql/functions/types/date_extract.asciidoc +++ b/docs/reference/esql/functions/types/date_extract.asciidoc @@ -1,5 +1,5 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | arg2 | result +date_part | field | result keyword | datetime | long |=== diff --git a/docs/reference/esql/functions/types/ends_with.asciidoc b/docs/reference/esql/functions/types/ends_with.asciidoc index 6c406b80c0cad..88489185b41f7 100644 --- a/docs/reference/esql/functions/types/ends_with.asciidoc +++ b/docs/reference/esql/functions/types/ends_with.asciidoc @@ -1,5 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | arg2 | result +str | suffix | result keyword | keyword | boolean +text | text | boolean |=== diff --git a/docs/reference/esql/functions/types/is_finite.asciidoc b/docs/reference/esql/functions/types/is_finite.asciidoc deleted file mode 100644 index e4883bdc1c076..0000000000000 --- a/docs/reference/esql/functions/types/is_finite.asciidoc +++ /dev/null @@ -1,5 +0,0 @@ -[%header.monospaced.styled,format=dsv,separator=|] -|=== -n | result -double | boolean -|=== diff --git a/docs/reference/esql/functions/types/is_infinite.asciidoc b/docs/reference/esql/functions/types/is_infinite.asciidoc deleted file mode 100644 index e4883bdc1c076..0000000000000 --- a/docs/reference/esql/functions/types/is_infinite.asciidoc +++ /dev/null @@ -1,5 +0,0 @@ -[%header.monospaced.styled,format=dsv,separator=|] -|=== -n | result -double | boolean -|=== diff --git a/docs/reference/esql/functions/types/is_nan.asciidoc b/docs/reference/esql/functions/types/is_nan.asciidoc deleted file mode 100644 index e4883bdc1c076..0000000000000 --- a/docs/reference/esql/functions/types/is_nan.asciidoc +++ /dev/null @@ -1,5 +0,0 @@ -[%header.monospaced.styled,format=dsv,separator=|] -|=== -n | result -double | boolean -|=== diff --git a/docs/reference/esql/functions/types/left.asciidoc b/docs/reference/esql/functions/types/left.asciidoc index c30a055f3be49..6899a408969f7 100644 --- a/docs/reference/esql/functions/types/left.asciidoc +++ b/docs/reference/esql/functions/types/left.asciidoc @@ -1,5 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -string | length | result +str | length | result keyword | integer | keyword +text | integer | keyword |=== diff --git a/docs/reference/esql/functions/types/length.asciidoc b/docs/reference/esql/functions/types/length.asciidoc index 9af62defcb2a9..de84fe63c794a 100644 --- a/docs/reference/esql/functions/types/length.asciidoc +++ b/docs/reference/esql/functions/types/length.asciidoc @@ -1,5 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | result +str | result keyword | integer +text | integer |=== diff --git a/docs/reference/esql/functions/types/mv_avg.asciidoc b/docs/reference/esql/functions/types/mv_avg.asciidoc index dd4f6b0725cc8..0bba9b341c301 100644 --- a/docs/reference/esql/functions/types/mv_avg.asciidoc +++ b/docs/reference/esql/functions/types/mv_avg.asciidoc @@ -1,6 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | result +field | result 
double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/mv_median.asciidoc b/docs/reference/esql/functions/types/mv_median.asciidoc index f1831429aa95c..4bb9cf6c7a1cb 100644 --- a/docs/reference/esql/functions/types/mv_median.asciidoc +++ b/docs/reference/esql/functions/types/mv_median.asciidoc @@ -1,6 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | result +v | result double | double integer | integer long | long diff --git a/docs/reference/esql/functions/types/mv_sum.asciidoc b/docs/reference/esql/functions/types/mv_sum.asciidoc index f1831429aa95c..4bb9cf6c7a1cb 100644 --- a/docs/reference/esql/functions/types/mv_sum.asciidoc +++ b/docs/reference/esql/functions/types/mv_sum.asciidoc @@ -1,6 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | result +v | result double | double integer | integer long | long diff --git a/docs/reference/esql/functions/types/replace.asciidoc b/docs/reference/esql/functions/types/replace.asciidoc index 6824d1fd97128..8c2be37bd63a0 100644 --- a/docs/reference/esql/functions/types/replace.asciidoc +++ b/docs/reference/esql/functions/types/replace.asciidoc @@ -1,6 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | arg2 | arg3 | result +str | regex | newStr | result keyword | keyword | keyword | keyword keyword | keyword | text | keyword keyword | text | keyword | keyword diff --git a/docs/reference/esql/functions/types/right.asciidoc b/docs/reference/esql/functions/types/right.asciidoc index c30a055f3be49..6899a408969f7 100644 --- a/docs/reference/esql/functions/types/right.asciidoc +++ b/docs/reference/esql/functions/types/right.asciidoc @@ -1,5 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -string | length | result +str | length | result keyword | integer | keyword +text | integer | keyword |=== diff --git a/docs/reference/esql/functions/types/round.asciidoc b/docs/reference/esql/functions/types/round.asciidoc index 5ba9e2f776d75..33e89c91f0bfe 100644 --- a/docs/reference/esql/functions/types/round.asciidoc +++ b/docs/reference/esql/functions/types/round.asciidoc @@ -1,5 +1,5 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | arg2 | result +value | decimals | result double | integer | double |=== diff --git a/docs/reference/esql/functions/types/split.asciidoc b/docs/reference/esql/functions/types/split.asciidoc index f1f744dbe4126..4b5e6856c8fe2 100644 --- a/docs/reference/esql/functions/types/split.asciidoc +++ b/docs/reference/esql/functions/types/split.asciidoc @@ -1,5 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | arg2 | result +str | delim | result keyword | keyword | keyword +text | text | keyword |=== diff --git a/docs/reference/esql/functions/types/starts_with.asciidoc b/docs/reference/esql/functions/types/starts_with.asciidoc index 6c406b80c0cad..863ddef3c0361 100644 --- a/docs/reference/esql/functions/types/starts_with.asciidoc +++ b/docs/reference/esql/functions/types/starts_with.asciidoc @@ -1,5 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | arg2 | result +str | prefix | result keyword | keyword | boolean +text | text | boolean |=== diff --git a/docs/reference/esql/functions/types/substring.asciidoc b/docs/reference/esql/functions/types/substring.asciidoc index 2aa96ceeb7e43..f12a40c9253fb 100644 --- a/docs/reference/esql/functions/types/substring.asciidoc +++ b/docs/reference/esql/functions/types/substring.asciidoc @@ -1,5 +1,6 @@ 
[%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | arg2 | arg3 | result +str | start | length | result keyword | integer | integer | keyword +text | integer | integer | keyword |=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec index a7dc82263a86e..5e78ee76c046c 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec @@ -9,91 +9,91 @@ v:long showFunctions#[skip:-8.12.99] show functions; - name:keyword | synopsis:keyword | argNames:keyword | argTypes:keyword | argDescriptions:keyword |returnType:keyword | description:keyword | optionalArgs:boolean | variadic:boolean -abs |"integer|long|double|unsigned_long abs(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |"integer|long|double|unsigned_long" | "" | false | false -acos |"double acos(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |double | "" | false | false -asin |"double asin(n:integer|long|double|unsigned_long)"|n |"integer|long|double|unsigned_long" | "" |double | "" | false | false -atan |"double atan(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |double | "" | false | false -atan2 |"double atan2(y:integer|long|double|unsigned_long, x:integer|long|double|unsigned_long)" |[y, x] |["integer|long|double|unsigned_long", "integer|long|double|unsigned_long"] |["", ""] |double | "" | [false, false] | false -auto_bucket |"double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date, to:integer|long|double|date)" |[field, buckets, from, to] |["integer|long|double|date", "integer", "integer|long|double|date", "integer|long|double|date"] |["", "", "", ""] | "double|date" | "" | [false, false, false, false] | false -avg |? avg(arg1:?) |arg1 |? | "" |? | "" | false | false -case |? case(arg1:?, arg2...:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | true -ceil |"? ceil(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |? | "" | false | false -cidr_match |? cidr_match(arg1:?, arg2...:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | true -coalesce |? coalesce(arg1:?, arg2...:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | true -concat |? concat(arg1:?, arg2...:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | true -cos |"double cos(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "An angle, in radians" |double | "Returns the trigonometric cosine of an angle" | false | false -cosh |"double cosh(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "The number who's hyperbolic cosine is to be returned" |double | "Returns the hyperbolic cosine of a number" | false | false -count |? count(arg1:?) |arg1 |? | "" |? | "" | false | false -count_distinct |? count_distinct(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? 
| "" | [false, false] | false -date_diff |"integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)"|[unit, startTimestamp, endTimestamp] |["keyword|text", "date", "date"] |["A valid date unit", "A string representing a start timestamp", "A string representing an end timestamp"] |integer | "Subtract 2 dates and return their difference in multiples of a unit specified in the 1st argument" | [false, false, false] | false -date_extract |? date_extract(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | false -date_format |? date_format(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | false -date_parse |"date date_parse(?datePattern:keyword, dateString:keyword|text)"|[datePattern, dateString]|["keyword", "keyword|text"]|[A valid date pattern, A string representing a date]|date |Parses a string into a date value | [true, false] | false -date_trunc |? date_trunc(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | false -e |? e() | null | null | null |? | "" | null | false -ends_with |? ends_with(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | false -floor |"? floor(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |? | "" | false | false -greatest |"? greatest(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" |[first, rest] |["integer|long|double|boolean|keyword|text|ip|version", "integer|long|double|boolean|keyword|text|ip|version"] |["", ""] |? | "" | [false, false] | true -least |"? least(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" |[first, rest] |["integer|long|double|boolean|keyword|text|ip|version", "integer|long|double|boolean|keyword|text|ip|version"] |["", ""] |? | "" | [false, false] | true -left |"? left(string:keyword, length:integer)" |[string, length] |["keyword", "integer"] |["", ""] |? | "" | [false, false] | false -length |? length(arg1:?) |arg1 |? | "" |? | "" | false | false -log10 |"? log10(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |? | "" | false | false -ltrim |"keyword|text ltrim(str:keyword|text)" |str |"keyword|text" | "" |"keyword|text" |Removes leading whitespaces from a string.| false | false -max |? max(arg1:?) |arg1 |? | "" |? | "" | false | false -median |? median(arg1:?) |arg1 |? | "" |? | "" | false | false -median_absolute_deviation|? median_absolute_deviation(arg1:?) |arg1 |? | "" |? | "" | false | false -min |? min(arg1:?) |arg1 |? | "" |? | "" | false | false -mv_avg |? mv_avg(arg1:?) |arg1 |? | "" |? | "" | false | false -mv_concat |"keyword mv_concat(v:text|keyword, delim:text|keyword)" |[v, delim] |["text|keyword", "text|keyword"] |["values to join", "delimiter"] |keyword | "Reduce a multivalued string field to a single valued field by concatenating all values." | [false, false] | false -mv_count |"integer mv_count(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" |v | "unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point" | "" | integer | "Reduce a multivalued field to a single valued field containing the count of values." | false | false -mv_dedupe |"? mv_dedupe(v:boolean|date|double|ip|text|integer|keyword|version|long)" |v | "boolean|date|double|ip|text|integer|keyword|version|long" | "" |? 
| "Remove duplicate values from a multivalued field." | false | false -mv_first |"? mv_first(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" |v | "unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point" | "" |? | "Reduce a multivalued field to a single valued field containing the first value." | false | false -mv_last |"? mv_last(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" |v | "unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point" | "" |? | "Reduce a multivalued field to a single valued field containing the last value." | false | false -mv_max |"? mv_max(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long)" |v | "unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long" | "" |? | "Reduce a multivalued field to a single valued field containing the maximum value." | false | false -mv_median |? mv_median(arg1:?) |arg1 |? | "" |? | "" | false | false -mv_min |"? mv_min(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long)" |v | "unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long" | "" |? | "Reduce a multivalued field to a single valued field containing the minimum value." | false | false -mv_sum |? mv_sum(arg1:?) |arg1 |? | "" |? | "" | false | false -now |? now() | null |null | null |? | "" | null | false -percentile |? percentile(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | false -pi |? pi() | null | null | null |? | "" | null | false -pow |"? pow(base:integer|unsigned_long|long|double, exponent:integer|unsigned_long|long|double)" |[base, exponent] |["integer|unsigned_long|long|double", "integer|unsigned_long|long|double"] |["", ""] |? | "" | [false, false] | false -replace |"? replace(arg1:?, arg2:?, arg3:?)" | [arg1, arg2, arg3] | [?, ?, ?] |["", "", ""] |? | "" | [false, false, false]| false -right |"? right(string:keyword, length:integer)" |[string, length] |["keyword", "integer"] |["", ""] |? | "" | [false, false] | false -round |? round(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | false -rtrim |"keyword|text rtrim(str:keyword|text)" |str |"keyword|text" | "" |"keyword|text" |Removes trailing whitespaces from a string.| false | false -sin |"double sin(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" |An angle, in radians |double |Returns the trigonometric sine of an angle | false | false -sinh |"double sinh(n:integer|long|double|unsigned_long)"|n |"integer|long|double|unsigned_long" | "The number to return the hyperbolic sine of" |double | "Returns the hyperbolic sine of a number" | false | false -split |? split(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | false -sqrt |"? sqrt(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |? | "" | false | false -starts_with |? starts_with(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | false -substring |? substring(arg1:?, arg2:?, arg3:?) |[arg1, arg2, arg3] |[?, ?, ?] |["", "", ""] |? | "" | [false, false, false]| false -sum |? sum(arg1:?) |arg1 |? | "" |? 
| "" | false | false -tan |"double tan(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "An angle, in radians" |double | "Returns the trigonometric tangent of an angle" | false | false -tanh |"double tanh(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "The number to return the hyperbolic tangent of" |double | "Returns the hyperbolic tangent of a number" | false | false -tau |? tau() | null | null | null |? | "" | null | false -to_bool |"boolean to_bool(v:boolean|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | |false |false -to_boolean |"boolean to_boolean(v:boolean|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | |false |false -to_cartesianpoint |"cartesian_point to_cartesianpoint(v:cartesian_point|keyword|text)" |v |"cartesian_point|keyword|text" | |cartesian_point | |false |false -to_datetime |"date to_datetime(v:date|keyword|text|double|long|unsigned_long|integer)" |v |"date|keyword|text|double|long|unsigned_long|integer" | |date | |false |false -to_dbl |"double to_dbl(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |double | |false |false -to_degrees |"double to_degrees(v:double|long|unsigned_long|integer)" |v |"double|long|unsigned_long|integer" | |double | |false |false -to_double |"double to_double(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |double | |false |false -to_dt |"date to_dt(v:date|keyword|text|double|long|unsigned_long|integer)" |v |"date|keyword|text|double|long|unsigned_long|integer" | |date | |false |false -to_geopoint |"geo_point to_geopoint(v:geo_point|keyword|text)" |v |"geo_point|keyword|text" | |geo_point | |false |false -to_int |"integer to_int(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |integer | |false |false -to_integer |"integer to_integer(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |integer | |false |false -to_ip |"ip to_ip(v:ip|keyword|text)" |v |"ip|keyword|text" | |ip | |false |false -to_long |"long to_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer|geo_point|cartesian_point)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer|geo_point|cartesian_point" | |long | |false |false -to_radians |"double to_radians(v:double|long|unsigned_long|integer)" |v |"double|long|unsigned_long|integer" | |double | |false |false -to_str |"keyword to_str(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" |v |"unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point" | |keyword | |false |false -to_string |"keyword to_string(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" |v |"unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point" | |keyword | |false |false -to_ul |"unsigned_long to_ul(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | |false |false -to_ulong |"unsigned_long 
to_ulong(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | |false |false -to_unsigned_long |"unsigned_long to_unsigned_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | |false |false -to_ver |"version to_ver(v:keyword|text|version)" |v |"keyword|text|version" | |version | |false |false -to_version |"version to_version(v:keyword|text|version)" |v |"keyword|text|version" | |version | |false |false -trim |"keyword|text trim(str:keyword|text)" |str |"keyword|text" | "" |"keyword|text" |Removes leading and trailing whitespaces from a string.| false | false + name:keyword | synopsis:keyword | argNames:keyword | argTypes:keyword | argDescriptions:keyword |returnType:keyword | description:keyword | optionalArgs:boolean | variadic:boolean | isAggregation:boolean +abs |"double|integer|long|unsigned_long abs(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Returns the absolute value." | false | false | false +acos |"double acos(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |double | "The arccosine of an angle, expressed in radians." | false | false | false +asin |"double asin(n:double|integer|long|unsigned_long)"|n |"double|integer|long|unsigned_long" | "" |double | "Inverse sine trigonometric function." | false | false | false +atan |"double atan(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |double | "Inverse tangent trigonometric function." | false | false | false +atan2 |"double atan2(y:double|integer|long|unsigned_long, x:double|integer|long|unsigned_long)" |[y, x] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |["", ""] |double | "The angle between the positive x-axis and the ray from the origin to the point (x , y) in the Cartesian plane." | [false, false] | false | false +auto_bucket |"double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date, to:integer|long|double|date)" |[field, buckets, from, to] |["integer|long|double|date", "integer", "integer|long|double|date", "integer|long|double|date"] |["", "", "", ""] | "double|date" | "Creates human-friendly buckets and returns a datetime value for each row that corresponds to the resulting bucket the row falls into." | [false, false, false, false] | false | false +avg |"double avg(field:double|integer|long|unsigned_long)" |field |"double|integer|long|unsigned_long" | "" |double | "The average of a numeric field." | false | false | true +case |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, rest...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |[condition, rest] |["boolean", "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"] |["", ""] |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to true." 
| [false, false] | true | false +ceil |"double|integer|long|unsigned_long ceil(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" | "double|integer|long|unsigned_long" | "Round a number up to the nearest integer." | false | false | false +cidr_match |boolean cidr_match(ip:ip, blockX...:keyword) |[ip, blockX] |[ip, keyword] |["", "CIDR block to test the IP against."] |boolean | "Returns true if the provided IP is contained in one of the provided CIDR blocks." | [false, false] | true | false +coalesce |"boolean|text|integer|keyword|long coalesce(expression:boolean|text|integer|keyword|long, expressionX...:boolean|text|integer|keyword|long)" |[expression, expressionX] |["boolean|text|integer|keyword|long", "boolean|text|integer|keyword|long"] |["Expression to evaluate", "Other expression to evaluate"] |"boolean|text|integer|keyword|long" | "Returns the first of its arguments that is not null." | [false, false] | true | false +concat |"keyword concat(first:keyword|text, rest...:keyword|text)" |[first, rest] |["keyword|text", "keyword|text"] |["", ""] |keyword | "Concatenates two or more strings." | [false, false] | true | false +cos |"double cos(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "An angle, in radians" |double | "Returns the trigonometric cosine of an angle" | false | false | false +cosh |"double cosh(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "The number who's hyperbolic cosine is to be returned" |double | "Returns the hyperbolic cosine of a number" | false | false | false +count |"long count(?field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |field |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Column or literal for which to count the number of values." |long | "Returns the total number (count) of input values." | true | false | true +count_distinct |"long count_distinct(field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version, ?precision:integer)" |[field, precision] |["boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version, integer"] |["Column or literal for which to count the number of distinct values.", ""] |long | "Returns the approximate number of distinct values." | [false, true] | false | true +date_diff |"integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)"|[unit, startTimestamp, endTimestamp] |["keyword|text", "date", "date"] |["A valid date unit", "A string representing a start timestamp", "A string representing an end timestamp"] |integer | "Subtract 2 dates and return their difference in multiples of a unit specified in the 1st argument" | [false, false, false] | false | false +date_extract |long date_extract(date_part:keyword, field:date) |[date_part, field] |[keyword, date] |["Part of the date to extract. 
Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era.", "Date expression"] |long | "Extracts parts of a date, like year, month, day, hour." | [false, false] | false | false +date_format |keyword date_format(?format:keyword, date:date) |[format, date] |[keyword, date] |["A valid date pattern", "Date expression"] |keyword | "Returns a string representation of a date, in the provided format." | [true, false] | false | false +date_parse |"date date_parse(?datePattern:keyword, dateString:keyword|text)"|[datePattern, dateString]|["keyword", "keyword|text"]|["A valid date pattern", "A string representing a date"]|date |Parses a string into a date value | [true, false] | false | false +date_trunc |"date date_trunc(interval:keyword, date:date)" |[interval, date] |[keyword, date] |["Interval; expressed using the timespan literal syntax.", "Date expression"] |date | "Rounds down a date to the closest interval." | [false, false] | false | false +e |double e() | null | null | null |double | "Euler’s number." | null | false | false +ends_with |"boolean ends_with(str:keyword|text, suffix:keyword|text)" |[str, suffix] |["keyword|text", "keyword|text"] |["", ""] |boolean | "Returns a boolean that indicates whether a keyword string ends with another string" | [false, false] | false | false +floor |"double|integer|long|unsigned_long floor(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Round a number down to the nearest integer." | false | false | false +greatest |"integer|long|double|boolean|keyword|text|ip|version greatest(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" |[first, rest] |["integer|long|double|boolean|keyword|text|ip|version", "integer|long|double|boolean|keyword|text|ip|version"] |["", ""] |"integer|long|double|boolean|keyword|text|ip|version" | "Returns the maximum value from many columns." | [false, false] | true | false +least |"integer|long|double|boolean|keyword|text|ip|version least(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" |[first, rest] |["integer|long|double|boolean|keyword|text|ip|version", "integer|long|double|boolean|keyword|text|ip|version"] |["", ""] |"integer|long|double|boolean|keyword|text|ip|version" | "Returns the minimum value from many columns." | [false, false] | true | false +left |"keyword left(str:keyword|text, length:integer)" |[str, length] |["keyword|text", "integer"] |["", ""] |keyword | "Return the substring that extracts length chars from the string starting from the left." | [false, false] | false | false +length |"integer length(str:keyword|text)" |str |"keyword|text" | "" |integer | "Returns the character length of a string." | false | false | false +log10 |"double log10(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |double | "Returns the log base 10." 
| false | false | false +ltrim |"keyword|text ltrim(str:keyword|text)" |str |"keyword|text" | "" |"keyword|text" |Removes leading whitespaces from a string.| false | false | false +max |"double|integer|long|unsigned_long max(field:double|integer|long|unsigned_long)" |field |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "The maximum value of a numeric field." | false | false | true +median |"double|integer|long|unsigned_long median(field:double|integer|long|unsigned_long)" |field |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "The value that is greater than half of all values and less than half of all values." | false | false | true +median_absolute_deviation|"double|integer|long|unsigned_long median_absolute_deviation(field:double|integer|long|unsigned_long)" |field |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "The median absolute deviation, a measure of variability." | false | false | true +min |"double|integer|long|unsigned_long min(field:double|integer|long|unsigned_long)" |field |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "The minimum value of a numeric field." | false | false | true +mv_avg |"double mv_avg(field:double|integer|long|unsigned_long)" |field |"double|integer|long|unsigned_long" | "" |double | "Converts a multivalued field into a single valued field containing the average of all of the values." | false | false | false +mv_concat |"keyword mv_concat(v:text|keyword, delim:text|keyword)" |[v, delim] |["text|keyword", "text|keyword"] |["values to join", "delimiter"] |keyword | "Reduce a multivalued string field to a single valued field by concatenating all values." | [false, false] | false | false +mv_count |"integer mv_count(v:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "" | integer | "Reduce a multivalued field to a single valued field containing the count of values." | false | false | false +mv_dedupe |"boolean|date|double|integer|ip|keyword|long|text|version mv_dedupe(v:boolean|date|double|integer|ip|keyword|long|text|version)" |v | "boolean|date|double|integer|ip|keyword|long|text|version" | "" |"boolean|date|double|integer|ip|keyword|long|text|version" | "Remove duplicate values from a multivalued field." | false | false | false +mv_first |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version mv_first(v:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the first value." | false | false | false +mv_last |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version mv_last(v:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the last value." 
| false | false | false +mv_max |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_max(v:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the maximum value." | false | false | false +mv_median |"double|integer|long|unsigned_long mv_median(v:double|integer|long|unsigned_long)" |v |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Converts a multivalued field into a single valued field containing the median value." | false | false | false +mv_min |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_min(v:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the minimum value." | false | false | false +mv_sum |"double|integer|long|unsigned_long mv_sum(v:double|integer|long|unsigned_long)" |v |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Converts a multivalued field into a single valued field containing the sum of all of the values." | false | false | false +now |date now() | null |null | null |date | "Returns current date and time." | null | false | false +percentile |"double|integer|long|unsigned_long percentile(field:double|integer|long|unsigned_long, percentile:double|integer|long)" |[field, percentile] |["double|integer|long|unsigned_long, double|integer|long"] |["", ""] |"double|integer|long|unsigned_long" | "The value at which a certain percentage of observed values occur." | [false, false] | false | true +pi |double pi() | null | null | null |double | "The ratio of a circle’s circumference to its diameter." | null | false | false +pow |"double pow(base:double|integer|long|unsigned_long, exponent:double|integer|long|unsigned_long)" |[base, exponent] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |["", ""] |double | "Returns the value of a base raised to the power of an exponent." | [false, false] | false | false +replace |"keyword replace(str:keyword|text, regex:keyword|text, newStr:keyword|text)" | [str, regex, newStr] | ["keyword|text", "keyword|text", "keyword|text"] |["", "", ""] |keyword | "The function substitutes in the string any match of the regular expression with the replacement string." | [false, false, false]| false | false +right |"keyword right(str:keyword|text, length:integer)" |[str, length] |["keyword|text", "integer"] |["", ""] |keyword | "Return the substring that extracts length chars from the string starting from the right." | [false, false] | false | false +round |"double round(value:double, ?decimals:integer)" |[value, decimals] |["double", "integer"] |["The numeric value to round", "The number of decimal places to round to. Defaults to 0."] |double | "Rounds a number to the closest number with the specified number of digits." 
| [false, true] | false | false +rtrim |"keyword|text rtrim(str:keyword|text)" |str |"keyword|text" | "" |"keyword|text" |Removes trailing whitespaces from a string.| false | false | false +sin |"double sin(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" |"An angle, in radians" |double |Returns the trigonometric sine of an angle | false | false | false +sinh |"double sinh(n:double|integer|long|unsigned_long)"|n |"double|integer|long|unsigned_long" |"The number to return the hyperbolic sine of" |double | "Returns the hyperbolic sine of a number" | false | false | false +split |"keyword split(str:keyword|text, delim:keyword|text)" |[str, delim] |["keyword|text", "keyword|text"] |["", ""] |keyword | "Split a single valued string into multiple strings." | [false, false] | false | false +sqrt |"double sqrt(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |double | "Returns the square root of a number." | false | false | false +starts_with |"boolean starts_with(str:keyword|text, prefix:keyword|text)" |[str, prefix] |["keyword|text", "keyword|text"] |["", ""] |boolean | "Returns a boolean that indicates whether a keyword string starts with another string" | [false, false] | false | false +substring |"keyword substring(str:keyword|text, start:integer, ?length:integer)" |[str, start, length] |["keyword|text", "integer", "integer"] |["", "", ""] |keyword | "Returns a substring of a string, specified by a start position and an optional length" | [false, false, true]| false | false +sum |"long sum(field:double|integer|long|unsigned_long)" |field |"double|integer|long|unsigned_long" | "" |long | "The sum of a numeric field." | false | false | true +tan |"double tan(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "An angle, in radians" |double | "Returns the trigonometric tangent of an angle" | false | false | false +tanh |"double tanh(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "The number to return the hyperbolic tangent of" |double | "Returns the hyperbolic tangent of a number" | false | false | false +tau |double tau() | null | null | null |double | "The ratio of a circle’s circumference to its radius." | null | false | false +to_bool |"boolean to_bool(v:boolean|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | "Converts an input value to a boolean value." |false |false | false +to_boolean |"boolean to_boolean(v:boolean|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | "Converts an input value to a boolean value." |false |false | false +to_cartesianpoint |"cartesian_point to_cartesianpoint(v:cartesian_point|keyword|text)" |v |"cartesian_point|keyword|text" | |cartesian_point | "Converts an input value to a point value." |false |false | false +to_datetime |"date to_datetime(v:date|keyword|text|double|long|unsigned_long|integer)" |v |"date|keyword|text|double|long|unsigned_long|integer" | |date | "Converts an input value to a date value." |false |false | false +to_dbl |"double to_dbl(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |double | "Converts an input value to a double value." 
|false |false | false +to_degrees |"double to_degrees(v:double|integer|long|unsigned_long)" |v |"double|integer|long|unsigned_long" | |double | "Converts a number in radians to degrees." |false |false | false +to_double |"double to_double(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |double | "Converts an input value to a double value." |false |false | false +to_dt |"date to_dt(v:date|keyword|text|double|long|unsigned_long|integer)" |v |"date|keyword|text|double|long|unsigned_long|integer" | |date | "Converts an input value to a date value." |false |false | false +to_geopoint |"geo_point to_geopoint(v:geo_point|keyword|text)" |v |"geo_point|keyword|text" | |geo_point | "Converts an input value to a geo_point value." |false |false | false +to_int |"integer to_int(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |integer | "Converts an input value to an integer value." |false |false | false +to_integer |"integer to_integer(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |integer | "Converts an input value to an integer value." |false |false | false +to_ip |"ip to_ip(v:ip|keyword|text)" |v |"ip|keyword|text" | |ip | "Converts an input string to an IP value." |false |false | false +to_long |"long to_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer|geo_point|cartesian_point)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer|geo_point|cartesian_point" | |long | "Converts an input value to a long value." |false |false | false +to_radians |"double to_radians(v:double|integer|long|unsigned_long)" |v |"double|integer|long|unsigned_long" | |double | "Converts a number in degrees to radians." |false |false | false +to_str |"keyword to_str(v:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |v |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | |keyword | "Converts a field into a string." |false |false | false +to_string |"keyword to_string(v:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |v |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | |keyword | "Converts a field into a string." |false |false | false +to_ul |"unsigned_long to_ul(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | "Converts an input value to an unsigned long value." |false |false | false +to_ulong |"unsigned_long to_ulong(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | "Converts an input value to an unsigned long value." |false |false | false +to_unsigned_long |"unsigned_long to_unsigned_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | "Converts an input value to an unsigned long value." |false |false | false +to_ver |"version to_ver(v:keyword|text|version)" |v |"keyword|text|version" | |version | "Converts an input string to a version value." 
|false |false | false +to_version |"version to_version(v:keyword|text|version)" |v |"keyword|text|version" | |version | "Converts an input string to a version value." |false |false | false +trim |"keyword|text trim(str:keyword|text)" |str |"keyword|text" | "" |"keyword|text" | "Removes leading and trailing whitespaces from a string." | false | false | false ; @@ -101,74 +101,74 @@ showFunctionsSynopsis#[skip:-8.12.99] show functions | keep synopsis; synopsis:keyword -"integer|long|double|unsigned_long abs(n:integer|long|double|unsigned_long)" -"double acos(n:integer|long|double|unsigned_long)" -"double asin(n:integer|long|double|unsigned_long)" -"double atan(n:integer|long|double|unsigned_long)" -"double atan2(y:integer|long|double|unsigned_long, x:integer|long|double|unsigned_long)" +"double|integer|long|unsigned_long abs(n:double|integer|long|unsigned_long)" +"double acos(n:double|integer|long|unsigned_long)" +"double asin(n:double|integer|long|unsigned_long)" +"double atan(n:double|integer|long|unsigned_long)" +"double atan2(y:double|integer|long|unsigned_long, x:double|integer|long|unsigned_long)" "double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date, to:integer|long|double|date)" -? avg(arg1:?) -? case(arg1:?, arg2...:?) -"? ceil(n:integer|long|double|unsigned_long)" -? cidr_match(arg1:?, arg2...:?) -? coalesce(arg1:?, arg2...:?) -? concat(arg1:?, arg2...:?) -"double cos(n:integer|long|double|unsigned_long)" -"double cosh(n:integer|long|double|unsigned_long)" -? count(arg1:?) -? count_distinct(arg1:?, arg2:?) +"double avg(field:double|integer|long|unsigned_long)" +"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, rest...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" +"double|integer|long|unsigned_long ceil(n:double|integer|long|unsigned_long)" +boolean cidr_match(ip:ip, blockX...:keyword) +"boolean|text|integer|keyword|long coalesce(expression:boolean|text|integer|keyword|long, expressionX...:boolean|text|integer|keyword|long)" +"keyword concat(first:keyword|text, rest...:keyword|text)" +"double cos(n:double|integer|long|unsigned_long)" +"double cosh(n:double|integer|long|unsigned_long)" +"long count(?field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" +"long count_distinct(field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version, ?precision:integer)" "integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)" -? date_extract(arg1:?, arg2:?) -? date_format(arg1:?, arg2:?) +long date_extract(date_part:keyword, field:date) +keyword date_format(?format:keyword, date:date) "date date_parse(?datePattern:keyword, dateString:keyword|text)" -? date_trunc(arg1:?, arg2:?) -? e() -? ends_with(arg1:?, arg2:?) -"? floor(n:integer|long|double|unsigned_long)" -"? greatest(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" -"? least(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" -? left(string:keyword, length:integer) -? length(arg1:?) -"? 
log10(n:integer|long|double|unsigned_long)"
+"date date_trunc(interval:keyword, date:date)"
+double e()
+"boolean ends_with(str:keyword|text, suffix:keyword|text)"
+"double|integer|long|unsigned_long floor(n:double|integer|long|unsigned_long)"
+"integer|long|double|boolean|keyword|text|ip|version greatest(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)"
+"integer|long|double|boolean|keyword|text|ip|version least(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)"
+"keyword left(str:keyword|text, length:integer)"
+"integer length(str:keyword|text)"
+"double log10(n:double|integer|long|unsigned_long)"
 "keyword|text ltrim(str:keyword|text)"
-? max(arg1:?)
-? median(arg1:?)
-? median_absolute_deviation(arg1:?)
-? min(arg1:?)
-? mv_avg(arg1:?)
+"double|integer|long|unsigned_long max(field:double|integer|long|unsigned_long)"
+"double|integer|long|unsigned_long median(field:double|integer|long|unsigned_long)"
+"double|integer|long|unsigned_long median_absolute_deviation(field:double|integer|long|unsigned_long)"
+"double|integer|long|unsigned_long min(field:double|integer|long|unsigned_long)"
+"double mv_avg(field:double|integer|long|unsigned_long)"
 "keyword mv_concat(v:text|keyword, delim:text|keyword)"
-"integer mv_count(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)"
-"? mv_dedupe(v:boolean|date|double|ip|text|integer|keyword|version|long)"
-"? mv_first(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)"
-"? mv_last(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)"
-"? mv_max(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long)"
-? mv_median(arg1:?)
-"? mv_min(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long)"
-? mv_sum(arg1:?)
-? now()
-? percentile(arg1:?, arg2:?)
-? pi()
-"? pow(base:integer|unsigned_long|long|double, exponent:integer|unsigned_long|long|double)"
-? replace(arg1:?, arg2:?, arg3:?)
-? right(string:keyword, length:integer)
-? round(arg1:?, arg2:?)
+"integer mv_count(v:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)"
+"boolean|date|double|integer|ip|keyword|long|text|version mv_dedupe(v:boolean|date|double|integer|ip|keyword|long|text|version)"
+"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version mv_first(v:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)"
+"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version mv_last(v:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)"
+"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_max(v:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)"
+"double|integer|long|unsigned_long mv_median(v:double|integer|long|unsigned_long)"
+"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_min(v:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)"
+"double|integer|long|unsigned_long mv_sum(v:double|integer|long|unsigned_long)"
+date now()
+"double|integer|long|unsigned_long percentile(field:double|integer|long|unsigned_long, percentile:double|integer|long)"
+double pi()
+"double pow(base:double|integer|long|unsigned_long, exponent:double|integer|long|unsigned_long)"
+"keyword replace(str:keyword|text, regex:keyword|text, newStr:keyword|text)"
+"keyword right(str:keyword|text, length:integer)"
+"double round(value:double, ?decimals:integer)"
 "keyword|text rtrim(str:keyword|text)"
-"double sin(n:integer|long|double|unsigned_long)"
-"double sinh(n:integer|long|double|unsigned_long)"
-? split(arg1:?, arg2:?)
-"? sqrt(n:integer|long|double|unsigned_long)"
-? starts_with(arg1:?, arg2:?)
-? substring(arg1:?, arg2:?, arg3:?)
-? sum(arg1:?)
-"double tan(n:integer|long|double|unsigned_long)"
-"double tanh(n:integer|long|double|unsigned_long)"
-? tau()
+"double sin(n:double|integer|long|unsigned_long)"
+"double sinh(n:double|integer|long|unsigned_long)"
+"keyword split(str:keyword|text, delim:keyword|text)"
+"double sqrt(n:double|integer|long|unsigned_long)"
+"boolean starts_with(str:keyword|text, prefix:keyword|text)"
+"keyword substring(str:keyword|text, start:integer, ?length:integer)"
+"long sum(field:double|integer|long|unsigned_long)"
+"double tan(n:double|integer|long|unsigned_long)"
+"double tanh(n:double|integer|long|unsigned_long)"
+double tau()
 "boolean to_bool(v:boolean|keyword|text|double|long|unsigned_long|integer)"
 "boolean to_boolean(v:boolean|keyword|text|double|long|unsigned_long|integer)"
 "cartesian_point to_cartesianpoint(v:cartesian_point|keyword|text)"
 "date to_datetime(v:date|keyword|text|double|long|unsigned_long|integer)"
 "double to_dbl(v:boolean|date|keyword|text|double|long|unsigned_long|integer)"
-"double to_degrees(v:double|long|unsigned_long|integer)"
+"double to_degrees(v:double|integer|long|unsigned_long)"
 "double to_double(v:boolean|date|keyword|text|double|long|unsigned_long|integer)"
 "date to_dt(v:date|keyword|text|double|long|unsigned_long|integer)"
 "geo_point to_geopoint(v:geo_point|keyword|text)"
@@ -176,9 +176,9 @@ synopsis:keyword
 "integer to_integer(v:boolean|date|keyword|text|double|long|unsigned_long|integer)"
 "ip to_ip(v:ip|keyword|text)"
 "long to_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer|geo_point|cartesian_point)"
-"double to_radians(v:double|long|unsigned_long|integer)"
-"keyword to_str(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)"
-"keyword to_string(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)"
+"double to_radians(v:double|integer|long|unsigned_long)"
+"keyword to_str(v:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)"
+"keyword to_string(v:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)"
 "unsigned_long to_ul(v:boolean|date|keyword|text|double|long|unsigned_long|integer)"
 "unsigned_long to_ulong(v:boolean|date|keyword|text|double|long|unsigned_long|integer)"
 "unsigned_long to_unsigned_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)"
@@ -196,9 +196,9 @@ SHOW functions
 ;
 // tag::showFunctionsFiltered-result[]
- name:keyword | synopsis:keyword | argNames:keyword | argTypes:keyword | argDescriptions:keyword | returnType:keyword | description:keyword | optionalArgs:boolean | variadic:boolean
-sin | "double sin(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "An angle, in radians" | double | "Returns the trigonometric sine of an angle" | false | false
-sinh | "double sinh(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "The number to return the hyperbolic sine of" | "double" | "Returns the hyperbolic sine of a number" | false | false
+ name:keyword | synopsis:keyword | argNames:keyword | argTypes:keyword | argDescriptions:keyword | returnType:keyword | description:keyword | optionalArgs:boolean | variadic:boolean | isAggregation:boolean
+sin | "double sin(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "An angle, in radians" | double | "Returns the trigonometric sine of an angle" | false | false | false
+sinh | "double sinh(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "The number to return the hyperbolic sine
of" | "double" | "Returns the hyperbolic sine of a number" | false | false | false // end::showFunctionsFiltered-result[] ; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java index 80da888eb4dfb..4ff614daaac85 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java @@ -1049,7 +1049,8 @@ public void testShowFunctions() { new ColumnInfo("returnType", "keyword"), new ColumnInfo("description", "keyword"), new ColumnInfo("optionalArgs", "boolean"), - new ColumnInfo("variadic", "boolean") + new ColumnInfo("variadic", "boolean"), + new ColumnInfo("isAggregation", "boolean") ) ) ); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index b7e7df6cfc499..4ee6a346e2a87 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -210,7 +210,14 @@ public static String normalizeName(String name) { public record ArgSignature(String name, String[] type, String description, boolean optional) {} - public record FunctionDescription(String name, List args, String[] returnType, String description, boolean variadic) { + public record FunctionDescription( + String name, + List args, + String[] returnType, + String description, + boolean variadic, + boolean isAggregation + ) { public String fullSignature() { StringBuilder builder = new StringBuilder(); builder.append(ShowFunctions.withPipes(returnType)); @@ -245,29 +252,30 @@ public List argNames() { public static FunctionDescription description(FunctionDefinition def) { var constructors = def.clazz().getConstructors(); if (constructors.length == 0) { - return new FunctionDescription(def.name(), List.of(), null, null, false); + return new FunctionDescription(def.name(), List.of(), null, null, false, false); } Constructor constructor = constructors[0]; FunctionInfo functionInfo = constructor.getAnnotation(FunctionInfo.class); - String functionDescription = functionInfo == null ? "" : functionInfo.description(); + String functionDescription = functionInfo == null ? "" : functionInfo.description().replaceAll(System.lineSeparator(), " "); String[] returnType = functionInfo == null ? new String[] { "?" } : functionInfo.returnType(); var params = constructor.getParameters(); // no multiple c'tors supported List args = new ArrayList<>(params.length); boolean variadic = false; + boolean isAggregation = functionInfo == null ? false : functionInfo.isAggregation(); for (int i = 1; i < params.length; i++) { // skipping 1st argument, the source if (Configuration.class.isAssignableFrom(params[i].getType()) == false) { Param paramInfo = params[i].getAnnotation(Param.class); String name = paramInfo == null ? params[i].getName() : paramInfo.name(); variadic |= List.class.isAssignableFrom(params[i].getType()); String[] type = paramInfo == null ? new String[] { "?" } : paramInfo.type(); - String desc = paramInfo == null ? "" : paramInfo.description(); + String desc = paramInfo == null ? 
"" : paramInfo.description().replaceAll(System.lineSeparator(), " "); boolean optional = paramInfo == null ? false : paramInfo.optional(); args.add(new EsqlFunctionRegistry.ArgSignature(name, type, desc, optional)); } } - return new FunctionDescription(def.name(), args, returnType, functionDescription, variadic); + return new FunctionDescription(def.name(), args, returnType, functionDescription, variadic, isAggregation); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java index 835bbfa16c25e..cd2e710498e5e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java @@ -21,4 +21,6 @@ String[] returnType(); String description() default ""; + + boolean isAggregation() default false; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java index 14e29a684a2fd..0ba834d1d8954 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java @@ -8,6 +8,8 @@ package org.elasticsearch.xpack.esql.expression.function.aggregate; import org.elasticsearch.xpack.esql.expression.SurrogateExpression; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.aggregate.AggregateFunction; @@ -23,7 +25,8 @@ public class Avg extends AggregateFunction implements SurrogateExpression { - public Avg(Source source, Expression field) { + @FunctionInfo(returnType = "double", description = "The average of a numeric field.", isAggregation = true) + public Avg(Source source, @Param(name = "field", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java index dcb52b6a3f2c1..6413f241dc0ca 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java @@ -10,6 +10,8 @@ import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.CountAggregatorFunction; import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.planner.ToAggregator; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Nullability; @@ -26,7 +28,28 @@ public class Count extends AggregateFunction implements EnclosedAgg, ToAggregator { - public Count(Source source, 
Expression field) { + @FunctionInfo(returnType = "long", description = "Returns the total number (count) of input values.", isAggregation = true) + public Count( + Source source, + @Param( + optional = true, + name = "field", + type = { + "boolean", + "cartesian_point", + "date", + "double", + "geo_point", + "integer", + "ip", + "keyword", + "long", + "text", + "unsigned_long", + "version" }, + description = "Column or literal for which to count the number of values." + ) Expression field + ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java index c49f9d6c45c1d..62dd3bc6b6254 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java @@ -15,6 +15,8 @@ import org.elasticsearch.compute.aggregation.CountDistinctLongAggregatorFunctionSupplier; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.planner.ToAggregator; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.OptionalArgument; @@ -35,7 +37,28 @@ public class CountDistinct extends AggregateFunction implements OptionalArgument private static final int DEFAULT_PRECISION = 3000; private final Expression precision; - public CountDistinct(Source source, Expression field, Expression precision) { + @FunctionInfo(returnType = "long", description = "Returns the approximate number of distinct values.", isAggregation = true) + public CountDistinct( + Source source, + @Param( + name = "field", + type = { + "boolean", + "cartesian_point", + "date", + "double", + "geo_point", + "integer", + "ip", + "keyword", + "long", + "text", + "unsigned_long", + "version" }, + description = "Column or literal for which to count the number of distinct values." + ) Expression field, + @Param(optional = true, name = "precision", type = { "integer" }) Expression precision + ) { super(source, field, precision != null ? 
List.of(precision) : List.of()); this.precision = precision; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java index 0964ce2bd5d67..cdcfe20c968a8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java @@ -11,6 +11,8 @@ import org.elasticsearch.compute.aggregation.MaxDoubleAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.MaxIntAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.MaxLongAggregatorFunctionSupplier; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -20,7 +22,12 @@ public class Max extends NumericAggregate { - public Max(Source source, Expression field) { + @FunctionInfo( + returnType = { "double", "integer", "long", "unsigned_long" }, + description = "The maximum value of a numeric field.", + isAggregation = true + ) + public Max(Source source, @Param(name = "field", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java index cffeb925d5e2b..7f5bce981db51 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java @@ -9,6 +9,8 @@ import org.elasticsearch.compute.aggregation.QuantileStates; import org.elasticsearch.xpack.esql.expression.SurrogateExpression; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.expression.function.aggregate.AggregateFunction; @@ -24,7 +26,12 @@ public class Median extends AggregateFunction implements SurrogateExpression { // TODO: Add the compression parameter - public Median(Source source, Expression field) { + @FunctionInfo( + returnType = { "double", "integer", "long", "unsigned_long" }, + description = "The value that is greater than half of all values and less than half of all values.", + isAggregation = true + ) + public Median(Source source, @Param(name = "field", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java index 6fafbeae8e1f4..ddf0fd15fe2d0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java @@ -11,6 +11,8 @@ import org.elasticsearch.compute.aggregation.MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.MedianAbsoluteDeviationIntAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.MedianAbsoluteDeviationLongAggregatorFunctionSupplier; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -20,7 +22,15 @@ public class MedianAbsoluteDeviation extends NumericAggregate { // TODO: Add parameter - public MedianAbsoluteDeviation(Source source, Expression field) { + @FunctionInfo( + returnType = { "double", "integer", "long", "unsigned_long" }, + description = "The median absolute deviation, a measure of variability.", + isAggregation = true + ) + public MedianAbsoluteDeviation( + Source source, + @Param(name = "field", type = { "double", "integer", "long", "unsigned_long" }) Expression field + ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java index 9625322fb72c8..22da614675f9e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java @@ -11,6 +11,8 @@ import org.elasticsearch.compute.aggregation.MinDoubleAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.MinIntAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.MinLongAggregatorFunctionSupplier; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -20,7 +22,12 @@ public class Min extends NumericAggregate { - public Min(Source source, Expression field) { + @FunctionInfo( + returnType = { "double", "integer", "long", "unsigned_long" }, + description = "The minimum value of a numeric field.", + isAggregation = true + ) + public Min(Source source, @Param(name = "field", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java index 9620e112fbda7..c34783f7352c3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java @@ -11,6 +11,8 @@ import org.elasticsearch.compute.aggregation.PercentileDoubleAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.PercentileIntAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.PercentileLongAggregatorFunctionSupplier; +import 
org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -25,7 +27,16 @@ public class Percentile extends NumericAggregate { private final Expression percentile; - public Percentile(Source source, Expression field, Expression percentile) { + @FunctionInfo( + returnType = { "double", "integer", "long", "unsigned_long" }, + description = "The value at which a certain percentage of observed values occur.", + isAggregation = true + ) + public Percentile( + Source source, + @Param(name = "field", type = { "double", "integer", "long", "unsigned_long" }) Expression field, + @Param(name = "percentile", type = { "double", "integer", "long" }) Expression percentile + ) { super(source, field, List.of(percentile)); this.percentile = percentile; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java index 115e2f9759fa9..0acf18981a83d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java @@ -10,6 +10,8 @@ import org.elasticsearch.compute.aggregation.SumDoubleAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.SumIntAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.SumLongAggregatorFunctionSupplier; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -26,7 +28,8 @@ */ public class Sum extends NumericAggregate { - public Sum(Source source, Expression field) { + @FunctionInfo(returnType = "long", description = "The sum of a numeric field.", isAggregation = true) + public Sum(Source source, @Param(name = "field", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java index 0174eca9c1ddf..84fa57c8d636a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java @@ -17,6 +17,8 @@ import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Literal; @@ -44,7 +46,44 @@ record Condition(Expression condition, Expression value) {} private final Expression elseValue; private DataType dataType; - public Case(Source source, Expression first, List rest) { + 
@FunctionInfo( + returnType = { + "boolean", + "cartesian_point", + "date", + "double", + "geo_point", + "integer", + "ip", + "keyword", + "long", + "text", + "unsigned_long", + "version" }, + description = """ + Accepts pairs of conditions and values. + The function returns the value that belongs to the first condition that evaluates to true.""" + ) + public Case( + Source source, + @Param(name = "condition", type = { "boolean" }) Expression first, + @Param( + name = "rest", + type = { + "boolean", + "cartesian_point", + "date", + "double", + "geo_point", + "integer", + "ip", + "keyword", + "long", + "text", + "unsigned_long", + "version" } + ) List rest + ) { super(source, Stream.concat(Stream.of(first), rest.stream()).toList()); int conditionCount = children().size() / 2; conditions = new ArrayList<>(conditionCount); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java index 25477e501645d..02589140e98a0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMax; import org.elasticsearch.xpack.ql.expression.Expression; @@ -37,6 +38,10 @@ public class Greatest extends ScalarFunction implements EvaluatorMapper, OptionalArgument { private DataType dataType; + @FunctionInfo( + returnType = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }, + description = "Returns the maximum value from many columns." 
+ ) public Greatest( Source source, @Param(name = "first", type = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }) Expression first, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java index e7456b0871b7c..912efcf7b7414 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMin; import org.elasticsearch.xpack.ql.expression.Expression; @@ -37,6 +38,10 @@ public class Least extends ScalarFunction implements EvaluatorMapper, OptionalArgument { private DataType dataType; + @FunctionInfo( + returnType = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }, + description = "Returns the minimum value from many columns." + ) public Least( Source source, @Param(name = "first", type = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }) Expression first, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java index 3a33e086d8fdd..388ab970205ad 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java @@ -41,7 +41,7 @@ public class ToBoolean extends AbstractConvertFunction { Map.entry(INTEGER, ToBooleanFromIntEvaluator.Factory::new) ); - @FunctionInfo(returnType = "boolean") + @FunctionInfo(returnType = "boolean", description = "Converts an input value to a boolean value.") public ToBoolean( Source source, @Param(name = "v", type = { "boolean", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java index baa999e125f7e..3756c322abc4e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java @@ -32,7 +32,7 @@ public class ToCartesianPoint extends AbstractConvertFunction { Map.entry(TEXT, ToCartesianPointFromStringEvaluator.Factory::new) ); - @FunctionInfo(returnType = "cartesian_point") + @FunctionInfo(returnType = "cartesian_point", description = "Converts an input value to a point value.") public ToCartesianPoint(Source source, @Param(name = "v", type = { "cartesian_point", "keyword", 
"text" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java index c2f621433ca21..1ff8bc39e36f4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java @@ -40,7 +40,7 @@ public class ToDatetime extends AbstractConvertFunction { Map.entry(INTEGER, ToLongFromIntEvaluator.Factory::new) // CastIntToLongEvaluator would be a candidate, but not MV'd ); - @FunctionInfo(returnType = "date") + @FunctionInfo(returnType = "date", description = "Converts an input value to a date value.") public ToDatetime( Source source, @Param(name = "v", type = { "date", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java index c858bdbdb3993..c5e7b473f4e56 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java @@ -40,8 +40,8 @@ public class ToDegrees extends AbstractConvertFunction implements EvaluatorMappe ) ); - @FunctionInfo(returnType = "double") - public ToDegrees(Source source, @Param(name = "v", type = { "double", "long", "unsigned_long", "integer" }) Expression field) { + @FunctionInfo(returnType = "double", description = "Converts a number in radians to degrees.") + public ToDegrees(Source source, @Param(name = "v", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java index 7711f55d667ba..6a984abdad50f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java @@ -42,7 +42,7 @@ public class ToDouble extends AbstractConvertFunction { Map.entry(INTEGER, ToDoubleFromIntEvaluator.Factory::new) // CastIntToDoubleEvaluator would be a candidate, but not MV'd ); - @FunctionInfo(returnType = "double") + @FunctionInfo(returnType = "double", description = "Converts an input value to a double value.") public ToDouble( Source source, @Param(name = "v", type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java index 8680d6d7e4a2c..16ea1235ccf59 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java @@ -32,7 +32,7 @@ public class ToGeoPoint extends AbstractConvertFunction { Map.entry(TEXT, ToGeoPointFromStringEvaluator.Factory::new) ); - @FunctionInfo(returnType = "geo_point") + @FunctionInfo(returnType = "geo_point", description = "Converts an input value to a geo_point value.") public ToGeoPoint(Source source, @Param(name = "v", type = { "geo_point", "keyword", "text" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java index 97512a03fe2ec..fc6a5f5c69afa 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java @@ -32,7 +32,7 @@ public class ToIP extends AbstractConvertFunction { Map.entry(TEXT, ToIPFromStringEvaluator.Factory::new) ); - @FunctionInfo(returnType = "ip") + @FunctionInfo(returnType = "ip", description = "Converts an input string to an IP value.") public ToIP(Source source, @Param(name = "v", type = { "ip", "keyword", "text" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java index a8e4ef804a2ba..2288ddcc33a55 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java @@ -44,7 +44,7 @@ public class ToInteger extends AbstractConvertFunction { Map.entry(LONG, ToIntegerFromLongEvaluator.Factory::new) ); - @FunctionInfo(returnType = "integer") + @FunctionInfo(returnType = "integer", description = "Converts an input value to an integer value.") public ToInteger( Source source, @Param(name = "v", type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java index 06f56e81fc50d..e34d226f01292 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java @@ -51,7 +51,7 @@ public class ToLong extends AbstractConvertFunction { Map.entry(INTEGER, ToLongFromIntEvaluator.Factory::new) // CastIntToLongEvaluator would be a candidate, but not MV'd ); - @FunctionInfo(returnType = "long") + @FunctionInfo(returnType = "long", description = "Converts an input value to a long value.") public ToLong( Source source, @Param( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java index a1d2e1381109d..ac31cf3759ad9 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java @@ -39,8 +39,8 @@ public class ToRadians extends AbstractConvertFunction implements EvaluatorMappe ) ); - @FunctionInfo(returnType = "double") - public ToRadians(Source source, @Param(name = "v", type = { "double", "long", "unsigned_long", "integer" }) Expression field) { + @FunctionInfo(returnType = "double", description = "Converts a number in degrees to radians.") + public ToRadians(Source source, @Param(name = "v", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java index ea5343c74a105..e157f508f9466 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java @@ -56,24 +56,24 @@ public class ToString extends AbstractConvertFunction implements EvaluatorMapper Map.entry(CARTESIAN_POINT, ToStringFromCartesianPointEvaluator.Factory::new) ); - @FunctionInfo(returnType = "keyword") + @FunctionInfo(returnType = "keyword", description = "Converts a field into a string.") public ToString( Source source, @Param( name = "v", type = { - "unsigned_long", - "date", "boolean", + "cartesian_point", + "date", "double", - "ip", - "text", + "geo_point", "integer", + "ip", "keyword", - "version", "long", - "geo_point", - "cartesian_point" } + "text", + "unsigned_long", + "version" } ) Expression v ) { super(source, v); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java index 651259db06054..656d99ee8ab80 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java @@ -46,7 +46,7 @@ public class ToUnsignedLong extends AbstractConvertFunction { Map.entry(INTEGER, ToUnsignedLongFromIntEvaluator.Factory::new) ); - @FunctionInfo(returnType = "unsigned_long") + @FunctionInfo(returnType = "unsigned_long", description = "Converts an input value to an unsigned long value.") public ToUnsignedLong( Source source, @Param(name = "v", type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java index 34e8f695b23c3..e196a91e3bac2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java @@ -32,7 +32,7 @@ public class ToVersion extends AbstractConvertFunction { Map.entry(TEXT, 
ToVersionFromStringEvaluator.Factory::new) ); - @FunctionInfo(returnType = "version") + @FunctionInfo(returnType = "version", description = "Converts an input string to a version value.") public ToVersion(Source source, @Param(name = "v", type = { "keyword", "text", "version" }) Expression v) { super(source, v); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java index 3a0ab9403b841..348d4a66479f6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java @@ -13,6 +13,8 @@ import org.elasticsearch.compute.ann.Fixed; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.TypeResolutions; @@ -38,7 +40,21 @@ public class DateExtract extends ConfigurationFunction implements EvaluatorMappe private ChronoField chronoField; - public DateExtract(Source source, Expression chronoFieldExp, Expression field, Configuration configuration) { + @FunctionInfo(returnType = "long", description = "Extracts parts of a date, like year, month, day, hour.") + public DateExtract( + Source source, + // Need to replace the commas in the description here with semi-colon as there's a bug in the CSV parser + // used in the CSVTests and fixing it is not trivial + @Param(name = "date_part", type = { "keyword" }, description = """ + Part of the date to extract. 
+ Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; + aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; + day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; + milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; + offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era.""") Expression chronoFieldExp, + @Param(name = "field", type = "date", description = "Date expression") Expression field, + Configuration configuration + ) { super(source, List.of(chronoFieldExp, field), configuration); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java index ce5ca2219c785..8a41e2d5ddcf7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java @@ -13,6 +13,8 @@ import org.elasticsearch.compute.ann.Fixed; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.OptionalArgument; @@ -39,10 +41,16 @@ public class DateFormat extends ConfigurationFunction implements OptionalArgumen private final Expression field; private final Expression format; - public DateFormat(Source source, Expression first, Expression second, Configuration configuration) { - super(source, second != null ? List.of(first, second) : List.of(first), configuration); - this.field = second != null ? second : first; - this.format = second != null ? first : null; + @FunctionInfo(returnType = "keyword", description = "Returns a string representation of a date, in the provided format.") + public DateFormat( + Source source, + @Param(optional = true, name = "format", type = { "keyword" }, description = "A valid date pattern") Expression format, + @Param(name = "date", type = { "date" }, description = "Date expression") Expression date, + Configuration configuration + ) { + super(source, date != null ? List.of(format, date) : List.of(format), configuration); + this.field = date != null ? date : format; + this.format = date != null ? 
format : null; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java index e5063bc0cbab4..0f35b95a287ad 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java @@ -13,6 +13,8 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.scalar.BinaryScalarFunction; @@ -32,7 +34,18 @@ public class DateTrunc extends BinaryDateTimeFunction implements EvaluatorMapper { - public DateTrunc(Source source, Expression interval, Expression field) { + @FunctionInfo(returnType = "date", description = "Rounds down a date to the closest interval.") + public DateTrunc( + Source source, + // Need to replace the commas in the description here with semi-colon as there's a bug in the CSV parser + // used in the CSVTests and fixing it is not trivial + @Param( + name = "interval", + type = { "keyword" }, + description = "Interval; expressed using the timespan literal syntax." + ) Expression interval, + @Param(name = "date", type = { "date" }, description = "Date expression") Expression field + ) { super(source, interval, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/Now.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/Now.java index ae8bd3e77f65e..1d104328e586a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/Now.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/Now.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.ann.Fixed; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.scalar.ConfigurationFunction; import org.elasticsearch.xpack.ql.expression.gen.script.ScriptTemplate; @@ -27,6 +28,7 @@ public class Now extends ConfigurationFunction implements EvaluatorMapper { private final long now; + @FunctionInfo(returnType = "date", description = "Returns current date and time.") public Now(Source source, Configuration configuration) { super(source, List.of(), configuration); this.now = configuration.now() == null ? 
System.currentTimeMillis() : configuration.now().toInstant().toEpochMilli(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java index f47637ced2e90..80d306fdc4fda 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java @@ -13,6 +13,8 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.ql.expression.gen.script.ScriptTemplate; @@ -48,7 +50,12 @@ public class CIDRMatch extends ScalarFunction implements EvaluatorMapper { private final Expression ipField; private final List matches; - public CIDRMatch(Source source, Expression ipField, List matches) { + @FunctionInfo(returnType = "boolean", description = "Returns true if the provided IP is contained in one of the provided CIDR blocks.") + public CIDRMatch( + Source source, + @Param(name = "ip", type = { "ip" }) Expression ipField, + @Param(name = "blockX", type = { "keyword" }, description = "CIDR block to test the IP against.") List matches + ) { super(source, CollectionUtils.combine(singletonList(ipField), matches)); this.ipField = ipField; this.matches = matches; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java index 8bc3ba3b184e9..7a2a2a5d05683 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java @@ -23,8 +23,8 @@ import java.util.function.Function; public class Abs extends UnaryScalarFunction implements EvaluatorMapper { - @FunctionInfo(returnType = { "integer", "long", "double", "unsigned_long" }) - public Abs(Source source, @Param(name = "n", type = { "integer", "long", "double", "unsigned_long" }) Expression n) { + @FunctionInfo(returnType = { "double", "integer", "long", "unsigned_long" }, description = "Returns the absolute value.") + public Abs(Source source, @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java index 5df73102a5ee6..603ef86af6c64 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java @@ -21,8 +21,8 @@ * Inverse cosine trigonometric function. 
*/ public class Acos extends AbstractTrigonometricFunction { - @FunctionInfo(returnType = "double") - public Acos(Source source, @Param(name = "n", type = { "integer", "long", "double", "unsigned_long" }) Expression n) { + @FunctionInfo(returnType = "double", description = "The arccosine of an angle, expressed in radians.") + public Acos(Source source, @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java index 66d35d8e8bb2c..f66409921ad2f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java @@ -21,8 +21,8 @@ * Inverse cosine trigonometric function. */ public class Asin extends AbstractTrigonometricFunction { - @FunctionInfo(returnType = "double") - public Asin(Source source, @Param(name = "n", type = { "integer", "long", "double", "unsigned_long" }) Expression n) { + @FunctionInfo(returnType = "double", description = "Inverse sine trigonometric function.") + public Asin(Source source, @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java index 88079e60fa66a..8f0ad96f96e8c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java @@ -21,8 +21,8 @@ * Inverse cosine trigonometric function. */ public class Atan extends AbstractTrigonometricFunction { - @FunctionInfo(returnType = "double") - public Atan(Source source, @Param(name = "n", type = { "integer", "long", "double", "unsigned_long" }) Expression n) { + @FunctionInfo(returnType = "double", description = "Inverse tangent trigonometric function.") + public Atan(Source source, @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java index e754aff1853b3..eca3b236abb8c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java @@ -34,11 +34,14 @@ public class Atan2 extends ScalarFunction implements EvaluatorMapper { private final Expression y; private final Expression x; - @FunctionInfo(returnType = "double") + @FunctionInfo( + returnType = "double", + description = "The angle between the positive x-axis and the ray from the origin to the point (x , y) in the Cartesian plane." 
+ ) public Atan2( Source source, - @Param(name = "y", type = { "integer", "long", "double", "unsigned_long" }) Expression y, - @Param(name = "x", type = { "integer", "long", "double", "unsigned_long" }) Expression x + @Param(name = "y", type = { "double", "integer", "long", "unsigned_long" }) Expression y, + @Param(name = "x", type = { "double", "integer", "long", "unsigned_long" }) Expression x ) { super(source, List.of(y, x)); this.y = y; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java index 27abeb44b2ff0..33e0addf44d2f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java @@ -83,7 +83,9 @@ public class AutoBucket extends ScalarFunction implements EvaluatorMapper { private final Expression from; private final Expression to; - @FunctionInfo(returnType = { "double", "date" }) + @FunctionInfo(returnType = { "double", "date" }, description = """ + Creates human-friendly buckets and returns a datetime value + for each row that corresponds to the resulting bucket the row falls into.""") public AutoBucket( Source source, @Param(name = "field", type = { "integer", "long", "double", "date" }) Expression field, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java index 85e5489bd74a0..cafc3d4df7613 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.ql.expression.Expression; @@ -30,7 +31,8 @@ *

*/ public class Ceil extends UnaryScalarFunction implements EvaluatorMapper { - public Ceil(Source source, @Param(name = "n", type = { "integer", "long", "double", "unsigned_long" }) Expression n) { + @FunctionInfo(returnType = { "double", "integer", "long", "unsigned_long" }, description = "Round a number up to the nearest integer.") + public Ceil(Source source, @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java index 5f8661bb0ae7d..0ecc0381636ef 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java @@ -24,7 +24,7 @@ public class Cos extends AbstractTrigonometricFunction { @FunctionInfo(returnType = "double", description = "Returns the trigonometric cosine of an angle") public Cos( Source source, - @Param(name = "n", type = { "integer", "long", "double", "unsigned_long" }, description = "An angle, in radians") Expression n + @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }, description = "An angle, in radians") Expression n ) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java index 6cc49cec0c32d..78d982acc7bb6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java @@ -26,7 +26,7 @@ public Cosh( Source source, @Param( name = "n", - type = { "integer", "long", "double", "unsigned_long" }, + type = { "double", "integer", "long", "unsigned_long" }, description = "The number who's hyperbolic cosine is to be returned" ) Expression n ) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/E.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/E.java index d2900062f7875..3497a945f1562 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/E.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/E.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; @@ -16,6 +17,7 @@ * Function that emits Euler's number. 
*/ public class E extends DoubleConstantFunction { + @FunctionInfo(returnType = "double", description = "Euler’s number.") public E(Source source) { super(source); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java index a0157105a9b82..b64be370eaded 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.ql.expression.Expression; @@ -30,7 +31,11 @@ *

*/ public class Floor extends UnaryScalarFunction implements EvaluatorMapper { - public Floor(Source source, @Param(name = "n", type = { "integer", "long", "double", "unsigned_long" }) Expression n) { + @FunctionInfo( + returnType = { "double", "integer", "long", "unsigned_long" }, + description = "Round a number down to the nearest integer." + ) + public Floor(Source source, @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java index 84bc9d19b409e..bca7c158ba366 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.ql.expression.Expression; @@ -27,7 +28,8 @@ import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isNumeric; public class Log10 extends UnaryScalarFunction implements EvaluatorMapper { - public Log10(Source source, @Param(name = "n", type = { "integer", "long", "double", "unsigned_long" }) Expression n) { + @FunctionInfo(returnType = "double", description = "Returns the log base 10.") + public Log10(Source source, @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pi.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pi.java index bd36be56b356c..a58bffd1dcbad 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pi.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pi.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; @@ -16,6 +17,8 @@ * Function that emits pi. 
*/ public class Pi extends DoubleConstantFunction { + + @FunctionInfo(returnType = "double", description = "The ratio of a circle’s circumference to its diameter.") public Pi(Source source) { super(source); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java index 57f32cf2212d3..5ff5a7019dcdb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.OptionalArgument; @@ -35,10 +36,11 @@ public class Pow extends ScalarFunction implements OptionalArgument, EvaluatorMa private final Expression base, exponent; private final DataType dataType; + @FunctionInfo(returnType = "double", description = "Returns the value of a base raised to the power of an exponent.") public Pow( Source source, - @Param(name = "base", type = { "integer", "unsigned_long", "long", "double" }) Expression base, - @Param(name = "exponent", type = { "integer", "unsigned_long", "long", "double" }) Expression exponent + @Param(name = "base", type = { "double", "integer", "long", "unsigned_long" }) Expression base, + @Param(name = "exponent", type = { "double", "integer", "long", "unsigned_long" }) Expression exponent ) { super(source, Arrays.asList(base, exponent)); this.base = base; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java index 3cbc74b3b6c28..3d62fcc7e044d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java @@ -12,6 +12,8 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.OptionalArgument; import org.elasticsearch.xpack.ql.expression.function.scalar.ScalarFunction; @@ -43,7 +45,18 @@ public class Round extends ScalarFunction implements OptionalArgument, Evaluator private final Expression field, decimals; - public Round(Source source, Expression field, Expression decimals) { + // @TODO: add support for "integer", "long", "unsigned_long" once tests are fixed + @FunctionInfo(returnType = "double", description = "Rounds a number to the closest number with the specified number of digits.") + public Round( + Source source, + @Param(name = "value", type = "double", description = "The numeric 
value to round") Expression field, + @Param( + optional = true, + name = "decimals", + type = { "integer" }, + description = "The number of decimal places to round to. Defaults to 0." + ) Expression decimals + ) { super(source, decimals != null ? Arrays.asList(field, decimals) : Arrays.asList(field)); this.field = field; this.decimals = decimals; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java index 7487d8df90395..b3f204cfc09c8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java @@ -25,7 +25,7 @@ public class Sin extends AbstractTrigonometricFunction { @FunctionInfo(returnType = "double", description = "Returns the trigonometric sine of an angle") public Sin( Source source, - @Param(name = "n", type = { "integer", "long", "double", "unsigned_long" }, description = "An angle, in radians") Expression n + @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }, description = "An angle, in radians") Expression n ) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java index 4b2adef5a2d6f..25221043f297d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java @@ -26,7 +26,7 @@ public Sinh( Source source, @Param( name = "n", - type = { "integer", "long", "double", "unsigned_long" }, + type = { "double", "integer", "long", "unsigned_long" }, description = "The number to return the hyperbolic sine of" ) Expression n ) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java index bdaf3a9498b09..c3f9855fdc4ad 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.ql.expression.Expression; @@ -27,7 +28,8 @@ import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isNumeric; public class Sqrt extends UnaryScalarFunction implements EvaluatorMapper { - public Sqrt(Source source, @Param(name = "n", type = { "integer", "long", "double", "unsigned_long" }) Expression n) { + @FunctionInfo(returnType = "double", description = "Returns the square root of a number.") + public Sqrt(Source source, @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }) Expression n) 
{ super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java index 5596c9098c034..528a0ae0a0e71 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java @@ -24,7 +24,7 @@ public class Tan extends AbstractTrigonometricFunction { @FunctionInfo(returnType = "double", description = "Returns the trigonometric tangent of an angle") public Tan( Source source, - @Param(name = "n", type = { "integer", "long", "double", "unsigned_long" }, description = "An angle, in radians") Expression n + @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }, description = "An angle, in radians") Expression n ) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java index ce59cec50bcca..c77bbaedf91b8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java @@ -26,7 +26,7 @@ public Tanh( Source source, @Param( name = "n", - type = { "integer", "long", "double", "unsigned_long" }, + type = { "double", "integer", "long", "unsigned_long" }, description = "The number to return the hyperbolic tangent of" ) Expression n ) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tau.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tau.java index e40d979886d0c..fd51d9f611d41 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tau.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tau.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; @@ -18,6 +19,7 @@ public class Tau extends DoubleConstantFunction { public static final double TAU = Math.PI * 2; + @FunctionInfo(returnType = "double", description = "The ratio of a circle’s circumference to its radius.") public Tau(Source source) { super(source); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java index 296229bab9b5a..5e9a4e2a75878 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java @@ -12,6 +12,8 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.search.aggregations.metrics.CompensatedSum; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import 
org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -29,7 +31,11 @@ * Reduce a multivalued field to a single valued field containing the average value. */ public class MvAvg extends AbstractMultivalueFunction { - public MvAvg(Source source, Expression field) { + @FunctionInfo( + returnType = "double", + description = "Converts a multivalued field into a single valued field containing the average of all of the values." + ) + public MvAvg(Source source, @Param(name = "field", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java index 9e4482bd48682..29350203a966d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java @@ -37,18 +37,18 @@ public MvCount( @Param( name = "v", type = { - "unsigned_long", - "date", "boolean", + "cartesian_point", + "date", "double", - "ip", - "text", + "geo_point", "integer", + "ip", "keyword", - "version", "long", - "geo_point", - "cartesian_point" } + "text", + "unsigned_long", + "version" } ) Expression v ) { super(source, v); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java index 7d9b40ad0d24f..dc5fa0036f789 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java @@ -25,13 +25,14 @@ * Removes duplicate values from a multivalued field. */ public class MvDedupe extends AbstractMultivalueFunction { - @FunctionInfo(returnType = "?", description = "Remove duplicate values from a multivalued field.") + // @TODO: add cartesian_point, geo_point, unsigned_long + @FunctionInfo( + returnType = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }, + description = "Remove duplicate values from a multivalued field." 
+ ) public MvDedupe( Source source, - @Param( - name = "v", - type = { "boolean", "date", "double", "ip", "text", "integer", "keyword", "version", "long" } // TODO add unsigned_long - ) Expression field + @Param(name = "v", type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }) Expression field ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirst.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirst.java index 1acb135292995..2bc8314959995 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirst.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirst.java @@ -33,24 +33,39 @@ * Reduce a multivalued field to a single valued field containing the minimum value. */ public class MvFirst extends AbstractMultivalueFunction { - @FunctionInfo(returnType = "?", description = "Reduce a multivalued field to a single valued field containing the first value.") + @FunctionInfo( + returnType = { + "boolean", + "cartesian_point", + "date", + "double", + "geo_point", + "integer", + "ip", + "keyword", + "long", + "text", + "unsigned_long", + "version" }, + description = "Reduce a multivalued field to a single valued field containing the first value." + ) public MvFirst( Source source, @Param( name = "v", type = { - "unsigned_long", - "date", "boolean", + "cartesian_point", + "date", "double", - "ip", - "text", + "geo_point", "integer", + "ip", "keyword", - "version", "long", - "geo_point", - "cartesian_point" } + "text", + "unsigned_long", + "version" } ) Expression field ) { super(source, field); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLast.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLast.java index 2e6066a6dc98c..aad003a649cca 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLast.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLast.java @@ -33,24 +33,39 @@ * Reduce a multivalued field to a single valued field containing the minimum value. */ public class MvLast extends AbstractMultivalueFunction { - @FunctionInfo(returnType = "?", description = "Reduce a multivalued field to a single valued field containing the last value.") + @FunctionInfo( + returnType = { + "boolean", + "cartesian_point", + "date", + "double", + "geo_point", + "integer", + "ip", + "keyword", + "long", + "text", + "unsigned_long", + "version" }, + description = "Reduce a multivalued field to a single valued field containing the last value." 
+ ) public MvLast( Source source, @Param( name = "v", type = { - "unsigned_long", - "date", "boolean", + "cartesian_point", + "date", "double", - "ip", - "text", + "geo_point", "integer", + "ip", "keyword", - "version", "long", - "geo_point", - "cartesian_point" } + "text", + "unsigned_long", + "version" } ) Expression field ) { super(source, field); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java index fafd8d6a584fa..0b08b99ca0687 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java @@ -29,12 +29,15 @@ * Reduce a multivalued field to a single valued field containing the maximum value. */ public class MvMax extends AbstractMultivalueFunction { - @FunctionInfo(returnType = "?", description = "Reduce a multivalued field to a single valued field containing the maximum value.") + @FunctionInfo( + returnType = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "unsigned_long", "version" }, + description = "Reduce a multivalued field to a single valued field containing the maximum value." + ) public MvMax( Source source, @Param( name = "v", - type = { "unsigned_long", "date", "boolean", "double", "ip", "text", "integer", "keyword", "version", "long" } + type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "unsigned_long", "version" } ) Expression v ) { super(source, v); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java index b60885967264c..66a8ec13b4475 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java @@ -14,6 +14,8 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -33,7 +35,11 @@ * Reduce a multivalued field to a single valued field containing the average value. */ public class MvMedian extends AbstractMultivalueFunction { - public MvMedian(Source source, Expression field) { + @FunctionInfo( + returnType = { "double", "integer", "long", "unsigned_long" }, + description = "Converts a multivalued field into a single valued field containing the median value." 
+ ) + public MvMedian(Source source, @Param(name = "v", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java index 1ae2ef41191b5..91a48e539042f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java @@ -29,12 +29,15 @@ * Reduce a multivalued field to a single valued field containing the minimum value. */ public class MvMin extends AbstractMultivalueFunction { - @FunctionInfo(returnType = "?", description = "Reduce a multivalued field to a single valued field containing the minimum value.") + @FunctionInfo( + returnType = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "unsigned_long", "version" }, + description = "Reduce a multivalued field to a single valued field containing the minimum value." + ) public MvMin( Source source, @Param( name = "v", - type = { "unsigned_long", "date", "boolean", "double", "ip", "text", "integer", "keyword", "version", "long" } + type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "unsigned_long", "version" } ) Expression field ) { super(source, field); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java index 48b83aa205549..96d7ad905c8d0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java @@ -12,6 +12,8 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.search.aggregations.metrics.CompensatedSum; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -28,7 +30,11 @@ * Reduce a multivalued field to a single valued field containing the sum of all values. */ public class MvSum extends AbstractMultivalueFunction { - public MvSum(Source source, Expression field) { + @FunctionInfo( + returnType = { "double", "integer", "long", "unsigned_long" }, + description = "Converts a multivalued field into a single valued field containing the sum of all of the values." 
+ ) + public MvSum(Source source, @Param(name = "v", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java index 43d4fff9c486d..4efbb6a306366 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java @@ -16,6 +16,8 @@ import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Expressions; @@ -41,7 +43,23 @@ public class Coalesce extends ScalarFunction implements EvaluatorMapper, OptionalArgument { private DataType dataType; - public Coalesce(Source source, Expression first, List rest) { + @FunctionInfo( + returnType = { "boolean", "text", "integer", "keyword", "long" }, + description = "Returns the first of its arguments that is not null." + ) + public Coalesce( + Source source, + @Param( + name = "expression", + type = { "boolean", "text", "integer", "keyword", "long" }, + description = "Expression to evaluate" + ) Expression first, + @Param( + name = "expressionX", + type = { "boolean", "text", "integer", "keyword", "long" }, + description = "Other expression to evaluate" + ) List rest + ) { super(source, Stream.concat(Stream.of(first), rest.stream()).toList()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java index 61f0dac6a9bbd..a3784bd0c8579 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java @@ -14,6 +14,8 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlClientException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Expressions; import org.elasticsearch.xpack.ql.expression.function.scalar.ScalarFunction; @@ -38,7 +40,12 @@ public class Concat extends ScalarFunction implements EvaluatorMapper { static final long MAX_CONCAT_LENGTH = MB.toBytes(1); - public Concat(Source source, Expression first, List rest) { + @FunctionInfo(returnType = "keyword", description = "Concatenates two or more strings.") + public Concat( + Source source, + @Param(name = "first", type = { "keyword", "text" }) Expression first, + @Param(name = "rest", type = { "keyword", "text" }) List rest + ) { super(source, Stream.concat(Stream.of(first), rest.stream()).toList()); } diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java index 250cbfad69b39..8db1d10b3d42c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java @@ -11,6 +11,8 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.ql.expression.gen.script.ScriptTemplate; @@ -32,7 +34,15 @@ public class EndsWith extends ScalarFunction implements EvaluatorMapper { private final Expression str; private final Expression suffix; - public EndsWith(Source source, Expression str, Expression suffix) { + @FunctionInfo( + returnType = "boolean", + description = "Returns a boolean that indicates whether a keyword string ends with another string" + ) + public EndsWith( + Source source, + @Param(name = "str", type = { "keyword", "text" }) Expression str, + @Param(name = "suffix", type = { "keyword", "text" }) Expression suffix + ) { super(source, Arrays.asList(str, suffix)); this.str = str; this.suffix = suffix; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java index 65d3a6388f790..70e11e69c1b70 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.ann.Fixed; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.scalar.ScalarFunction; @@ -42,9 +43,13 @@ public class Left extends ScalarFunction implements EvaluatorMapper { private final Expression length; + @FunctionInfo( + returnType = "keyword", + description = "Return the substring that extracts length chars from the string starting from the left." 
+ ) public Left( Source source, - @Param(name = "string", type = { "keyword" }) Expression str, + @Param(name = "str", type = { "keyword", "text" }) Expression str, @Param(name = "length", type = { "integer" }) Expression length ) { super(source, Arrays.asList(str, length)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java index 9f944c62af6a3..e3ea802981273 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java @@ -12,6 +12,8 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -27,7 +29,8 @@ public class Length extends UnaryScalarFunction implements EvaluatorMapper { - public Length(Source source, Expression field) { + @FunctionInfo(returnType = "integer", description = "Returns the character length of a string.") + public Length(Source source, @Param(name = "str", type = { "keyword", "text" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java index 0ed4bd0fe7d02..c0468569216a5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java @@ -12,6 +12,8 @@ import org.elasticsearch.compute.ann.Fixed; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.ql.expression.gen.script.ScriptTemplate; @@ -37,7 +39,16 @@ public class Replace extends ScalarFunction implements EvaluatorMapper { private final Expression newStr; private final Expression regex; - public Replace(Source source, Expression str, Expression regex, Expression newStr) { + @FunctionInfo( + returnType = "keyword", + description = "The function substitutes in the string any match of the regular expression with the replacement string." 
+ ) + public Replace( + Source source, + @Param(name = "str", type = { "keyword", "text" }) Expression str, + @Param(name = "regex", type = { "keyword", "text" }) Expression regex, + @Param(name = "newStr", type = { "keyword", "text" }) Expression newStr + ) { super(source, Arrays.asList(str, regex, newStr)); this.str = str; this.regex = regex; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java index 7e96f7a396472..4d94591a007b5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.ann.Fixed; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.scalar.ScalarFunction; @@ -42,9 +43,13 @@ public class Right extends ScalarFunction implements EvaluatorMapper { private final Expression length; + @FunctionInfo( + returnType = "keyword", + description = "Return the substring that extracts length chars from the string starting from the right." + ) public Right( Source source, - @Param(name = "string", type = { "keyword" }) Expression str, + @Param(name = "str", type = { "keyword", "text" }) Expression str, @Param(name = "length", type = { "integer" }) Expression length ) { super(source, Arrays.asList(str, length)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java index 7e6b3659bbdf0..66d50aa4df061 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java @@ -13,6 +13,8 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.scalar.BinaryScalarFunction; @@ -32,7 +34,12 @@ * Splits a string on some delimiter into a multivalued string field. 
*/ public class Split extends BinaryScalarFunction implements EvaluatorMapper { - public Split(Source source, Expression str, Expression delim) { + @FunctionInfo(returnType = "keyword", description = "Split a single valued string into multiple strings.") + public Split( + Source source, + @Param(name = "str", type = { "keyword", "text" }) Expression str, + @Param(name = "delim", type = { "keyword", "text" }) Expression delim + ) { super(source, str, delim); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java index d78ad3df64d1f..0acda09e7bcb1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java @@ -11,6 +11,8 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.ql.expression.gen.script.ScriptTemplate; @@ -32,7 +34,15 @@ public class StartsWith extends ScalarFunction implements EvaluatorMapper { private final Expression str; private final Expression prefix; - public StartsWith(Source source, Expression str, Expression prefix) { + @FunctionInfo( + returnType = "boolean", + description = "Returns a boolean that indicates whether a keyword string starts with another string" + ) + public StartsWith( + Source source, + @Param(name = "str", type = { "keyword", "text" }) Expression str, + @Param(name = "prefix", type = { "keyword", "text" }) Expression prefix + ) { super(source, Arrays.asList(str, prefix)); this.str = str; this.prefix = prefix; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java index c287aeafc8d80..d3e5ab53f9e53 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java @@ -12,6 +12,8 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.OptionalArgument; import org.elasticsearch.xpack.ql.expression.function.scalar.ScalarFunction; @@ -35,7 +37,16 @@ public class Substring extends ScalarFunction implements OptionalArgument, Evalu private final Expression str, start, length; - public Substring(Source source, Expression str, Expression start, Expression length) { + @FunctionInfo( + returnType = "keyword", + description = "Returns a 
substring of a string, specified by a start position and an optional length" + ) + public Substring( + Source source, + @Param(name = "str", type = { "keyword", "text" }) Expression str, + @Param(name = "start", type = { "integer" }) Expression start, + @Param(optional = true, name = "length", type = { "integer" }) Expression length + ) { super(source, length == null ? Arrays.asList(str, start) : Arrays.asList(str, start, length)); this.str = str; this.start = start; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowFunctions.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowFunctions.java index 6c36771ddd870..5a4b90c45f23d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowFunctions.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowFunctions.java @@ -39,7 +39,7 @@ public ShowFunctions(Source source) { for (var name : List.of("name", "synopsis", "argNames", "argTypes", "argDescriptions", "returnType", "description")) { attributes.add(new ReferenceAttribute(Source.EMPTY, name, KEYWORD)); } - for (var name : List.of("optionalArgs", "variadic")) { + for (var name : List.of("optionalArgs", "variadic", "isAggregation")) { attributes.add(new ReferenceAttribute(Source.EMPTY, name, BOOLEAN)); } } @@ -63,6 +63,7 @@ public List> values(FunctionRegistry functionRegistry) { row.add(signature.description()); row.add(collect(signature, EsqlFunctionRegistry.ArgSignature::optional)); row.add(signature.variadic()); + row.add(signature.isAggregation()); rows.add(row); } rows.sort(Comparator.comparing(x -> ((BytesRef) x.get(0)))); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumTests.java index 99d338f5b36bf..90b1bc22c45e4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumTests.java @@ -66,7 +66,6 @@ public static Iterable parameters() { data.add(asLongUnsigned(UNSIGNED_LONG_MAX)); return data; })); - return parameterSuppliersFromTypedData(cases); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithTests.java index 63e70b6612470..bc94ab39abccb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithTests.java @@ -104,6 +104,21 @@ public static Iterable parameters() { equalTo(str.endsWith(suffix)) ); })); + suppliers.add(new TestCaseSupplier("ends_with with text args", () -> { + String str = randomAlphaOfLength(5); + String suffix = randomAlphaOfLength(1); + str = str + suffix; + + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef(str), DataTypes.TEXT, "str"), + new TestCaseSupplier.TypedData(new BytesRef(suffix), DataTypes.TEXT, "suffix") + ), + "EndsWithEvaluator[str=Attribute[channel=0], suffix=Attribute[channel=1]]", + DataTypes.BOOLEAN, + 
equalTo(str.endsWith(suffix)) + ); + })); return parameterSuppliersFromTypedData(suppliers); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftTests.java index 316bb679f2b70..6c3727455bbf1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftTests.java @@ -154,6 +154,19 @@ public static Iterable parameters() { equalTo(new BytesRef("")) ); })); + suppliers.add(new TestCaseSupplier("ascii as text input", () -> { + String text = randomAlphaOfLengthBetween(1, 64); + int length = between(1, text.length()); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.TEXT, "str"), + new TestCaseSupplier.TypedData(length, DataTypes.INTEGER, "length") + ), + "LeftEvaluator[str=Attribute[channel=0], length=Attribute[channel=1]]", + DataTypes.KEYWORD, + equalTo(new BytesRef(unicodeLeftSubstring(text, length))) + ); + })); return parameterSuppliersFromTypedData(suppliers); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthTests.java index c6eb2d1f2a2c0..e60fbd70d241f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.xpack.ql.type.DataTypes; import org.hamcrest.Matcher; +import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; @@ -32,7 +33,8 @@ public LengthTests(@Name("TestCase") Supplier testCas @ParametersFactory public static Iterable parameters() { - return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("length basic test", () -> { + List cases = new ArrayList<>(); + cases.addAll(List.of(new TestCaseSupplier("length basic test", () -> { BytesRef value = new BytesRef(randomAlphaOfLength(between(0, 10000))); return new TestCaseSupplier.TestCase( List.of(new TestCaseSupplier.TypedData(value, DataTypes.KEYWORD, "f")), @@ -40,23 +42,37 @@ public static Iterable parameters() { DataTypes.INTEGER, equalTo(UnicodeUtil.codePointCount(value)) ); - }), - new TestCaseSupplier("empty string", () -> makeTestCase("", 0)), - new TestCaseSupplier("single ascii character", () -> makeTestCase("a", 1)), - new TestCaseSupplier("ascii string", () -> makeTestCase("clump", 5)), - new TestCaseSupplier("3 bytes, 1 code point", () -> makeTestCase("☕", 1)), - new TestCaseSupplier("6 bytes, 2 code points", () -> makeTestCase("❗️", 2)), - new TestCaseSupplier("100 random alpha", () -> makeTestCase(randomAlphaOfLength(100), 100)), - new TestCaseSupplier("100 random code points", () -> makeTestCase(randomUnicodeOfCodepointLength(100), 100)) - )); + }))); + cases.addAll(makeTestCases("empty string", () -> "", 0)); + cases.addAll(makeTestCases("single ascii character", () -> "a", 1)); + cases.addAll(makeTestCases("ascii string", () -> "clump", 5)); + cases.addAll(makeTestCases("3 bytes, 1 code point", () -> "☕", 1)); + cases.addAll(makeTestCases("6 bytes, 2 code points", () 
-> "❗️", 2)); + cases.addAll(makeTestCases("100 random alpha", () -> randomAlphaOfLength(100), 100)); + cases.addAll(makeTestCases("100 random code points", () -> randomUnicodeOfCodepointLength(100), 100)); + return parameterSuppliersFromTypedData(cases); } - private static TestCaseSupplier.TestCase makeTestCase(String text, int expectedLength) { - return new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.KEYWORD, "f")), - "LengthEvaluator[val=Attribute[channel=0]]", - DataTypes.INTEGER, - equalTo(expectedLength) + private static List makeTestCases(String title, Supplier text, int expectedLength) { + return List.of( + new TestCaseSupplier( + title + " with keyword", + () -> new TestCaseSupplier.TestCase( + List.of(new TestCaseSupplier.TypedData(new BytesRef(text.get()), DataTypes.KEYWORD, "f")), + "LengthEvaluator[val=Attribute[channel=0]]", + DataTypes.INTEGER, + equalTo(expectedLength) + ) + ), + new TestCaseSupplier( + title + " with text", + () -> new TestCaseSupplier.TestCase( + List.of(new TestCaseSupplier.TypedData(new BytesRef(text.get()), DataTypes.TEXT, "f")), + "LengthEvaluator[val=Attribute[channel=0]]", + DataTypes.INTEGER, + equalTo(expectedLength) + ) + ) ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightTests.java index 0eeb312512b30..500580585ff90 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightTests.java @@ -154,7 +154,19 @@ public static Iterable parameters() { equalTo(new BytesRef("")) ); })); - + suppliers.add(new TestCaseSupplier("ascii as text", () -> { + String text = randomAlphaOfLengthBetween(1, 64); + int length = between(1, text.length()); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.TEXT, "str"), + new TestCaseSupplier.TypedData(length, DataTypes.INTEGER, "length") + ), + "RightEvaluator[str=Attribute[channel=0], length=Attribute[channel=1]]", + DataTypes.KEYWORD, + equalTo(new BytesRef(unicodeRightSubstring(text, length))) + ); + })); return parameterSuppliersFromTypedData(suppliers); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java index e0611c7125e6e..56793bd1730d2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java @@ -57,6 +57,22 @@ public static Iterable parameters() { DataTypes.KEYWORD, equalTo(strings.size() == 1 ? 
strings.get(0) : strings) ); + }), new TestCaseSupplier("split basic test with text input", () -> { + String delimiter = randomAlphaOfLength(1); + List strings = IntStream.range(0, between(1, 5)) + .mapToObj(i -> randomValueOtherThanMany(s -> s.contains(delimiter), () -> randomAlphaOfLength(4))) + .map(BytesRef::new) + .collect(Collectors.toList()); + String str = strings.stream().map(BytesRef::utf8ToString).collect(joining(delimiter)); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef(str), DataTypes.TEXT, "str"), + new TestCaseSupplier.TypedData(new BytesRef(delimiter), DataTypes.TEXT, "delim") + ), + "SplitVariableEvaluator[str=Attribute[channel=0], delim=Attribute[channel=1]]", + DataTypes.KEYWORD, + equalTo(strings.size() == 1 ? strings.get(0) : strings) + ); }))); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithTests.java index 6eacea1d02987..961e27eea36c4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithTests.java @@ -46,6 +46,21 @@ public static Iterable parameters() { DataTypes.BOOLEAN, equalTo(str.startsWith(prefix)) ); + }), new TestCaseSupplier("Starts with basic test with text args", () -> { + String str = randomAlphaOfLength(5); + String prefix = randomAlphaOfLength(5); + if (randomBoolean()) { + str = prefix + str; + } + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef(str), DataTypes.TEXT, "str"), + new TestCaseSupplier.TypedData(new BytesRef(prefix), DataTypes.TEXT, "prefix") + ), + "StartsWithEvaluator[str=Attribute[channel=0], prefix=Attribute[channel=1]]", + DataTypes.BOOLEAN, + equalTo(str.startsWith(prefix)) + ); }))); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java index fd9cb29ec62c4..8dbc9eaeeccd6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java @@ -50,6 +50,20 @@ public static Iterable parameters() { DataTypes.KEYWORD, equalTo(new BytesRef(text.substring(start - 1, start + length - 1))) ); + }), new TestCaseSupplier("Substring basic test with text input", () -> { + int start = between(1, 8); + int length = between(1, 10 - start); + String text = randomAlphaOfLength(10); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.TEXT, "str"), + new TestCaseSupplier.TypedData(start, DataTypes.INTEGER, "start"), + new TestCaseSupplier.TypedData(length, DataTypes.INTEGER, "end") + ), + "SubstringEvaluator[str=Attribute[channel=0], start=Attribute[channel=1], length=Attribute[channel=2]]", + DataTypes.KEYWORD, + equalTo(new BytesRef(text.substring(start - 1, start + length - 1))) + ); }))); } From af336e7495bc59fbff3f20260b9198742cdaa132 Mon Sep 17 00:00:00 2001 From: David Roberts Date: Thu, 11 Jan 2024 09:34:35 +0000 Subject: [PATCH 33/75] [ML] 
Unmute MLModelDeploymentsUpgradeIT testTrainedModelDeployment (#104222) It seems that this test failure was purely down to inconsistency of ml-cpp dependencies, and not a real problem. Therefore the test can be unmuted. Fixes #104193 --- .../org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java index fd6b7200ff004..d935672e0a243 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java @@ -8,7 +8,6 @@ package org.elasticsearch.upgrades; import org.apache.http.util.EntityUtils; -import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; @@ -32,7 +31,6 @@ import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.oneOf; -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104193") public class MLModelDeploymentsUpgradeIT extends AbstractUpgradeTestCase { // See PyTorchModelIT for how this model was created From cc9fba36e6fe3c2fdc91752d1ebff4e012d88c6a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20Fred=C3=A9n?= <109296772+jfreden@users.noreply.github.com> Date: Thu, 11 Jan 2024 10:40:52 +0100 Subject: [PATCH 34/75] Add ApiKey expiration time to audit log (#103959) Follow up to PR: https://github.com/elastic/elasticsearch/pull/103453 --- docs/changelog/103959.yaml | 5 +++++ .../security/auditing/event-types.asciidoc | 6 +++--- .../audit/logfile/LoggingAuditTrail.java | 4 ++++ .../audit/logfile/LoggingAuditTrailTests.java | 20 +++++++++++-------- 4 files changed, 24 insertions(+), 11 deletions(-) create mode 100644 docs/changelog/103959.yaml diff --git a/docs/changelog/103959.yaml b/docs/changelog/103959.yaml new file mode 100644 index 0000000000000..4c8b4413b95f8 --- /dev/null +++ b/docs/changelog/103959.yaml @@ -0,0 +1,5 @@ +pr: 103959 +summary: Add `ApiKey` expiration time to audit log +area: Security +type: enhancement +issues: [] diff --git a/docs/reference/security/auditing/event-types.asciidoc b/docs/reference/security/auditing/event-types.asciidoc index 9539ea38b5a6b..a856336dba8d0 100644 --- a/docs/reference/security/auditing/event-types.asciidoc +++ b/docs/reference/security/auditing/event-types.asciidoc @@ -255,7 +255,7 @@ event action. "applications":[],"run_as":[]},{"cluster":["all"],"indices":[{"names": ["index-b*"],"privileges":["all"]}],"applications":[],"run_as":[]}], "metadata":{"application":"my-application","environment":{"level": 1, -"tags":["dev","staging"]}}}}} +"tags":["dev","staging"]}},"expiration":"10d"}}} ==== [[event-change-apikeys]] @@ -281,7 +281,7 @@ event action. "applications":[],"run_as":[]},{"cluster":["all"],"indices":[{"names": ["index-b*"],"privileges":["all"]}],"applications":[],"run_as":[]}], "metadata":{"application":"my-application","environment":{"level":1, -"tags":["dev","staging"]}}}}} +"tags":["dev","staging"]}},"expiration":"10d"}}} ==== [[event-delete-privileges]] @@ -797,7 +797,7 @@ The `role_descriptors` objects have the same schema as the `role_descriptor` object that is part of the above `role` config object. 
The object for an API key update will differ in that it will not include -a `name` or `expiration`. +a `name`. `grant` :: An object like: + diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java index e2b9c36c1d0ee..87c372f561757 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java @@ -1325,6 +1325,10 @@ private static void withBaseUpdateApiKeyFields(final XContentBuilder builder, fi // because it replaces any metadata previously associated with the API key builder.field("metadata", baseUpdateApiKeyRequest.getMetadata()); } + builder.field( + "expiration", + baseUpdateApiKeyRequest.getExpiration() != null ? baseUpdateApiKeyRequest.getExpiration().toString() : null + ); } private static void withRoleDescriptor(XContentBuilder builder, RoleDescriptor roleDescriptor) throws IOException { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java index 9c48354b951d8..2438e625259d1 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java @@ -627,21 +627,23 @@ public void testSecurityConfigChangeEventFormattingForRoles() throws IOException CapturingLogger.output(logger.getName(), Level.INFO).clear(); final String keyId = randomAlphaOfLength(10); + final TimeValue newExpiration = randomFrom(ApiKeyTests.randomFutureExpirationTime(), null); final var updateApiKeyRequest = new UpdateApiKeyRequest( keyId, randomBoolean() ? null : keyRoleDescriptors, metadataWithSerialization.metadata(), - ApiKeyTests.randomFutureExpirationTime() + newExpiration ); auditTrail.accessGranted(requestId, authentication, UpdateApiKeyAction.NAME, updateApiKeyRequest, authorizationInfo); final var expectedUpdateKeyAuditEventString = String.format( Locale.ROOT, """ - "change":{"apikey":{"id":"%s","type":"rest"%s%s}}\ + "change":{"apikey":{"id":"%s","type":"rest"%s%s,"expiration":%s}}\ """, keyId, updateApiKeyRequest.getRoleDescriptors() == null ? "" : "," + roleDescriptorsStringBuilder, - updateApiKeyRequest.getMetadata() == null ? "" : Strings.format(",\"metadata\":%s", metadataWithSerialization.serialization()) + updateApiKeyRequest.getMetadata() == null ? "" : Strings.format(",\"metadata\":%s", metadataWithSerialization.serialization()), + updateApiKeyRequest.getExpiration() == null ? null : Strings.format("\"%s\"", newExpiration) ); output = CapturingLogger.output(logger.getName(), Level.INFO); assertThat(output.size(), is(2)); @@ -664,13 +666,13 @@ public void testSecurityConfigChangeEventFormattingForRoles() throws IOException keyIds, randomBoolean() ? 
null : keyRoleDescriptors, metadataWithSerialization.metadata(), - ApiKeyTests.randomFutureExpirationTime() + null ); auditTrail.accessGranted(requestId, authentication, BulkUpdateApiKeyAction.NAME, bulkUpdateApiKeyRequest, authorizationInfo); final var expectedBulkUpdateKeyAuditEventString = String.format( Locale.ROOT, """ - "change":{"apikeys":{"ids":[%s],"type":"rest"%s%s}}\ + "change":{"apikeys":{"ids":[%s],"type":"rest"%s%s,"expiration":null}}\ """, bulkUpdateApiKeyRequest.getIds().stream().map(s -> Strings.format("\"%s\"", s)).collect(Collectors.joining(",")), bulkUpdateApiKeyRequest.getRoleDescriptors() == null ? "" : "," + roleDescriptorsStringBuilder, @@ -875,22 +877,24 @@ public void testSecurityConfigChangeEventForCrossClusterApiKeys() throws IOExcep updateMetadataWithSerialization = randomApiKeyMetadataWithSerialization(); } + final TimeValue newExpiration = randomFrom(ApiKeyTests.randomFutureExpirationTime(), null); final var updateRequest = new UpdateCrossClusterApiKeyRequest( createRequest.getId(), updateAccess, updateMetadataWithSerialization.metadata(), - ApiKeyTests.randomFutureExpirationTime() + newExpiration ); auditTrail.accessGranted(requestId, authentication, UpdateCrossClusterApiKeyAction.NAME, updateRequest, authorizationInfo); final String expectedUpdateAuditEventString = String.format( Locale.ROOT, """ - "change":{"apikey":{"id":"%s","type":"cross_cluster"%s%s}}\ + "change":{"apikey":{"id":"%s","type":"cross_cluster"%s%s,"expiration":%s}}\ """, createRequest.getId(), updateAccess == null ? "" : ",\"role_descriptors\":" + accessWithSerialization.serialization(), - updateRequest.getMetadata() == null ? "" : Strings.format(",\"metadata\":%s", updateMetadataWithSerialization.serialization()) + updateRequest.getMetadata() == null ? "" : Strings.format(",\"metadata\":%s", updateMetadataWithSerialization.serialization()), + newExpiration == null ? null : String.format(Locale.ROOT, "\"%s\"", newExpiration) ); output = CapturingLogger.output(logger.getName(), Level.INFO); From 147484b059a49b4eef55a113e361812872abde54 Mon Sep 17 00:00:00 2001 From: ShourieG <105607378+ShourieG@users.noreply.github.com> Date: Thu, 11 Jan 2024 16:00:02 +0530 Subject: [PATCH 35/75] [elasticsearch][processors] - Added support for override flag in rename processor (#103565) * added override flag for rename processer along with factory tests * added yaml tests for rename processor using the override flag * updated renameProcessor tests to include override flag as a parameter * updated rename processor tests to incorporate override flag = true scenario * updated rename processor asciidoc with override option * updated rename processor asciidoc with override option * removed unnecessary supresswarnings tag * corrected formatting errors * updated processor tests * fixed yaml tests * Prefer early throw style here * Whitespace * Move and rewrite this test It's just a simple test of the primary behavior of the rename processor, so put it first and simplify it. 
* Rename this test It doesn't actually exercise template snippets * Tidy up this test --------- Co-authored-by: Joe Gallo --- .../ingest/processors/rename.asciidoc | 3 +- .../ingest/common/RenameProcessor.java | 16 +++-- .../common/DotExpanderProcessorTests.java | 1 + .../common/RenameProcessorFactoryTests.java | 14 ++++ .../ingest/common/RenameProcessorTests.java | 42 +++++++---- .../rest-api-spec/test/ingest/280_rename.yml | 72 ++++++++++++++++++- 6 files changed, 127 insertions(+), 21 deletions(-) diff --git a/docs/reference/ingest/processors/rename.asciidoc b/docs/reference/ingest/processors/rename.asciidoc index 9b0eeaa157d55..82b97f48519c9 100644 --- a/docs/reference/ingest/processors/rename.asciidoc +++ b/docs/reference/ingest/processors/rename.asciidoc @@ -13,7 +13,8 @@ Renames an existing field. If the field doesn't exist or the new name is already | Name | Required | Default | Description | `field` | yes | - | The field to be renamed. Supports <>. | `target_field` | yes | - | The new name of the field. Supports <>. -| `ignore_missing` | no | `false` | If `true` and `field` does not exist, the processor quietly exits without modifying the document +| `ignore_missing` | no | `false` | If `true` and `field` does not exist, the processor quietly exits without modifying the document. +| `override` | no | `false` | If `true`, the processor will update pre-existing non-null-valued fields. When set to `false`, such fields will not be touched. include::common-options.asciidoc[] |====== diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RenameProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RenameProcessor.java index d73eb1906ac5b..85affa225b882 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RenameProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RenameProcessor.java @@ -23,22 +23,24 @@ public final class RenameProcessor extends AbstractProcessor { public static final String TYPE = "rename"; - private final TemplateScript.Factory field; private final TemplateScript.Factory targetField; private final boolean ignoreMissing; + private final boolean overrideEnabled; RenameProcessor( String tag, String description, TemplateScript.Factory field, TemplateScript.Factory targetField, - boolean ignoreMissing + boolean ignoreMissing, + boolean overrideEnabled ) { super(tag, description); this.field = field; this.targetField = targetField; this.ignoreMissing = ignoreMissing; + this.overrideEnabled = overrideEnabled; } TemplateScript.Factory getField() { @@ -53,6 +55,10 @@ boolean isIgnoreMissing() { return ignoreMissing; } + public boolean isOverrideEnabled() { + return overrideEnabled; + } + @Override public IngestDocument execute(IngestDocument document) { String path = document.renderTemplate(field); @@ -63,12 +69,13 @@ public IngestDocument execute(IngestDocument document) { throw new IllegalArgumentException("field [" + path + "] doesn't exist"); } } + // We fail here if the target field point to an array slot that is out of range. // If we didn't do this then we would fail if we set the value in the target_field // and then on failure processors would not see that value we tried to rename as we already // removed it. 
String target = document.renderTemplate(targetField); - if (document.hasField(target, true)) { + if (document.hasField(target, true) && overrideEnabled == false) { throw new IllegalArgumentException("field [" + target + "] already exists"); } @@ -115,7 +122,8 @@ public RenameProcessor create( scriptService ); boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false); - return new RenameProcessor(processorTag, description, fieldTemplate, targetFieldTemplate, ignoreMissing); + boolean overrideEnabled = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "override", false); + return new RenameProcessor(processorTag, description, fieldTemplate, targetFieldTemplate, ignoreMissing, overrideEnabled); } } } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorTests.java index 3e3c7af964861..1cd0a0ead8785 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorTests.java @@ -83,6 +83,7 @@ public void testEscapeFields_valueField() throws Exception { null, new TestTemplateService.MockTemplateScript.Factory("foo"), new TestTemplateService.MockTemplateScript.Factory("foo.bar"), + false, false ); processor.execute(document); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorFactoryTests.java index 2299081eb22cd..005cbc260f69a 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorFactoryTests.java @@ -37,6 +37,7 @@ public void testCreate() throws Exception { assertThat(renameProcessor.getField().newInstance(Map.of()).execute(), equalTo("old_field")); assertThat(renameProcessor.getTargetField().newInstance(Map.of()).execute(), equalTo("new_field")); assertThat(renameProcessor.isIgnoreMissing(), equalTo(false)); + assertThat(renameProcessor.isOverrideEnabled(), equalTo(false)); } public void testCreateWithIgnoreMissing() throws Exception { @@ -52,6 +53,19 @@ public void testCreateWithIgnoreMissing() throws Exception { assertThat(renameProcessor.isIgnoreMissing(), equalTo(true)); } + public void testCreateWithEnableOverride() throws Exception { + Map config = new HashMap<>(); + config.put("field", "old_field"); + config.put("target_field", "new_field"); + config.put("override", true); + String processorTag = randomAlphaOfLength(10); + RenameProcessor renameProcessor = factory.create(null, processorTag, null, config); + assertThat(renameProcessor.getTag(), equalTo(processorTag)); + assertThat(renameProcessor.getField().newInstance(Map.of()).execute(), equalTo("old_field")); + assertThat(renameProcessor.getTargetField().newInstance(Map.of()).execute(), equalTo("new_field")); + assertThat(renameProcessor.isOverrideEnabled(), equalTo(true)); + } + public void testCreateNoFieldPresent() throws Exception { Map config = new HashMap<>(); config.put("target_field", "new_field"); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java 
b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java index f472e9d9bacd4..9765320ef4d57 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java @@ -36,7 +36,7 @@ public void testRename() throws Exception { do { newFieldName = RandomDocumentPicks.randomFieldName(random()); } while (RandomDocumentPicks.canAddField(newFieldName, ingestDocument) == false || newFieldName.equals(fieldName)); - Processor processor = createRenameProcessor(fieldName, newFieldName, false); + Processor processor = createRenameProcessor(fieldName, newFieldName, false, false); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(newFieldName, Object.class), equalTo(fieldValue)); } @@ -54,7 +54,7 @@ public void testRenameArrayElement() throws Exception { document.put("one", one); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); - Processor processor = createRenameProcessor("list.0", "item", false); + Processor processor = createRenameProcessor("list.0", "item", false, false); processor.execute(ingestDocument); Object actualObject = ingestDocument.getSourceAndMetadata().get("list"); assertThat(actualObject, instanceOf(List.class)); @@ -67,7 +67,7 @@ public void testRenameArrayElement() throws Exception { assertThat(actualObject, instanceOf(String.class)); assertThat(actualObject, equalTo("item1")); - processor = createRenameProcessor("list.0", "list.3", false); + processor = createRenameProcessor("list.0", "list.3", false, false); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -82,7 +82,7 @@ public void testRenameArrayElement() throws Exception { public void testRenameNonExistingField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); - Processor processor = createRenameProcessor(fieldName, RandomDocumentPicks.randomFieldName(random()), false); + Processor processor = createRenameProcessor(fieldName, RandomDocumentPicks.randomFieldName(random()), false, false); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -95,11 +95,11 @@ public void testRenameNonExistingFieldWithIgnoreMissing() throws Exception { IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); String fieldName = RandomDocumentPicks.randomFieldName(random()); - Processor processor = createRenameProcessor(fieldName, RandomDocumentPicks.randomFieldName(random()), true); + Processor processor = createRenameProcessor(fieldName, RandomDocumentPicks.randomFieldName(random()), true, false); processor.execute(ingestDocument); assertIngestDocument(originalIngestDocument, ingestDocument); - Processor processor1 = createRenameProcessor("", RandomDocumentPicks.randomFieldName(random()), true); + Processor processor1 = createRenameProcessor("", RandomDocumentPicks.randomFieldName(random()), true, false); processor1.execute(ingestDocument); assertIngestDocument(originalIngestDocument, ingestDocument); } @@ -110,6 +110,7 @@ public void testRenameNewFieldAlreadyExists() throws Exception { Processor processor = createRenameProcessor( 
RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument), fieldName, + false, false ); try { @@ -125,7 +126,7 @@ public void testRenameExistingFieldNullValue() throws Exception { String fieldName = RandomDocumentPicks.randomFieldName(random()); ingestDocument.setFieldValue(fieldName, (Object) null); String newFieldName = randomValueOtherThanMany(ingestDocument::hasField, () -> RandomDocumentPicks.randomFieldName(random())); - Processor processor = createRenameProcessor(fieldName, newFieldName, false); + Processor processor = createRenameProcessor(fieldName, newFieldName, false, false); processor.execute(ingestDocument); if (newFieldName.startsWith(fieldName + '.')) { assertThat(ingestDocument.getFieldValue(fieldName, Object.class), instanceOf(Map.class)); @@ -148,7 +149,7 @@ public void testRenameAtomicOperationSetFails() throws Exception { } }), "list", new Metadata.FieldProperty<>(Object.class, true, true, null)) ); - Processor processor = createRenameProcessor("list", "new_field", false); + Processor processor = createRenameProcessor("list", "new_field", false, false); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -167,7 +168,7 @@ public void testRenameAtomicOperationRemoveFails() throws Exception { metadata, Map.of("list", new Metadata.FieldProperty<>(Object.class, false, true, null)) ); - Processor processor = createRenameProcessor("list", "new_field", false); + Processor processor = createRenameProcessor("list", "new_field", false, false); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -182,30 +183,41 @@ public void testRenameLeafIntoBranch() throws Exception { Map source = new HashMap<>(); source.put("foo", "bar"); IngestDocument ingestDocument = TestIngestDocument.withDefaultVersion(source); - Processor processor1 = createRenameProcessor("foo", "foo.bar", false); + Processor processor1 = createRenameProcessor("foo", "foo.bar", false, false); processor1.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("foo", Map.class), equalTo(Map.of("bar", "bar"))); assertThat(ingestDocument.getFieldValue("foo.bar", String.class), equalTo("bar")); - Processor processor2 = createRenameProcessor("foo.bar", "foo.bar.baz", false); + Processor processor2 = createRenameProcessor("foo.bar", "foo.bar.baz", false, false); processor2.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("foo", Map.class), equalTo(Map.of("bar", Map.of("baz", "bar")))); assertThat(ingestDocument.getFieldValue("foo.bar", Map.class), equalTo(Map.of("baz", "bar"))); assertThat(ingestDocument.getFieldValue("foo.bar.baz", String.class), equalTo("bar")); - // for fun lets try to restore it (which don't allow today) - Processor processor3 = createRenameProcessor("foo.bar.baz", "foo", false); + // try to restore it (will fail, not allowed without the override flag) + Processor processor3 = createRenameProcessor("foo.bar.baz", "foo", false, false); Exception e = expectThrows(IllegalArgumentException.class, () -> processor3.execute(ingestDocument)); assertThat(e.getMessage(), equalTo("field [foo] already exists")); } - private RenameProcessor createRenameProcessor(String field, String targetField, boolean ignoreMissing) { + public void testRenameOverride() throws Exception { + Map source = new HashMap<>(); + source.put("event.original", "existing_message"); + source.put("message", "new_message"); + IngestDocument ingestDocument = TestIngestDocument.withDefaultVersion(source); + Processor processor1 = 
createRenameProcessor("message", "event.original", false, true); + processor1.execute(ingestDocument); + assertThat(ingestDocument.getFieldValue("event.original", String.class), equalTo("new_message")); + } + + private RenameProcessor createRenameProcessor(String field, String targetField, boolean ignoreMissing, boolean overrideEnabled) { return new RenameProcessor( randomAlphaOfLength(10), null, new TestTemplateService.MockTemplateScript.Factory(field), new TestTemplateService.MockTemplateScript.Factory(targetField), - ignoreMissing + ignoreMissing, + overrideEnabled ); } } diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/280_rename.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/280_rename.yml index 5e38f09dbd024..26a0d5eef50ae 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/280_rename.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/280_rename.yml @@ -5,12 +5,45 @@ teardown: id: "1" ignore: 404 +--- +"Test Rename Processor": + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "rename" : { + "field" : "foo", + "target_field": "bar" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: "1" + pipeline: "1" + body: { + foo: "test" + } + + - do: + get: + index: test + id: "1" + - match: { _source.bar: "test" } + --- "Test Rename Processor with template snippets and ignore_missing": - do: ingest.put_pipeline: id: "1" - body: > + body: > { "processors": [ { @@ -38,3 +71,40 @@ teardown: index: test id: "1" - match: { _source.message: "test" } + +--- +"Test Rename Processor with override": + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "rename" : { + "field" : "message", + "target_field": "event.original", + "override": true + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: "1" + pipeline: "1" + body: { + message: "overridden original message", + event: { + original: "original message" + } + } + + - do: + get: + index: test + id: "1" + - match: { _source.event.original: "overridden original message" } From 4c531bfe172a47038f28bdf2aff19eccb5f24288 Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Thu, 11 Jan 2024 12:18:40 +0100 Subject: [PATCH 36/75] [Connector API] Fix bug in configuration validation parser (#104198) --- docs/changelog/104198.yaml | 5 ++ .../335_connector_update_configuration.yml | 22 +++++++ .../ConfigurationValidation.java | 32 +++++++--- .../ConfigurationValidationType.java | 3 +- .../ConnectorConfigurationTests.java | 63 +++++++++++++++++++ 5 files changed, 114 insertions(+), 11 deletions(-) create mode 100644 docs/changelog/104198.yaml diff --git a/docs/changelog/104198.yaml b/docs/changelog/104198.yaml new file mode 100644 index 0000000000000..0b5b4680c2d88 --- /dev/null +++ b/docs/changelog/104198.yaml @@ -0,0 +1,5 @@ +pr: 104198 +summary: "[Connector API] Fix bug in configuration validation parser" +area: Application +type: bug +issues: [] diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml index 5a012853b4bf9..df4a640a0495d 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml +++ 
b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml @@ -72,8 +72,18 @@ setup: type: str ui_restrictions: [ ] validations: + - constraint: [123, 456, 789] + type: included_in + - constraint: ["string 1", "string 2", "string 3"] + type: included_in - constraint: 0 type: greater_than + - constraint: 42 + type: less_than + - constraint: int + type: list_type + - constraint: "\\d+" + type: regex value: 456 - match: { result: updated } @@ -84,6 +94,18 @@ setup: - match: { configuration.some_field.value: 456 } - match: { status: configured } + - match: { configuration.some_field.validations.0.constraint: [123, 456, 789] } + - match: { configuration.some_field.validations.0.type: included_in } + - match: { configuration.some_field.validations.1.constraint: ["string 1", "string 2", "string 3"] } + - match: { configuration.some_field.validations.1.type: included_in } + - match: { configuration.some_field.validations.2.constraint: 0 } + - match: { configuration.some_field.validations.2.type: greater_than } + - match: { configuration.some_field.validations.3.constraint: 42 } + - match: { configuration.some_field.validations.3.type: less_than } + - match: { configuration.some_field.validations.4.constraint: int } + - match: { configuration.some_field.validations.4.type: list_type } + - match: { configuration.some_field.validations.5.constraint: "\\d+" } + - match: { configuration.some_field.validations.5.type: regex } --- "Update Connector Configuration with null tooltip": diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidation.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidation.java index 476ae113398dc..8f05e67ecb14d 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidation.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidation.java @@ -36,7 +36,7 @@ public class ConfigurationValidation implements Writeable, ToXContentObject { * Constructs a new ConfigurationValidation instance with specified constraint and type. * This constructor initializes the object with a given validation constraint and its associated validation type. * - * @param constraint The validation constraint, represented as an Object. + * @param constraint The validation constraint (string, number or list), represented as generic Object type. * @param type The type of configuration validation, specified as an instance of {@link ConfigurationValidationType}. 
*/ private ConfigurationValidation(Object constraint, ConfigurationValidationType type) { @@ -59,14 +59,12 @@ public ConfigurationValidation(StreamInput in) throws IOException { ); static { - PARSER.declareField(constructorArg(), (p, c) -> { - if (p.currentToken() == XContentParser.Token.VALUE_STRING) { - return p.text(); - } else if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { - return p.numberValue(); - } - throw new XContentParseException("Unsupported token [" + p.currentToken() + "]"); - }, CONSTRAINT_FIELD, ObjectParser.ValueType.VALUE); + PARSER.declareField( + constructorArg(), + (p, c) -> parseConstraintValue(p), + CONSTRAINT_FIELD, + ObjectParser.ValueType.VALUE_OBJECT_ARRAY + ); PARSER.declareField( constructorArg(), (p, c) -> ConfigurationValidationType.validationType(p.text()), @@ -75,6 +73,22 @@ public ConfigurationValidation(StreamInput in) throws IOException { ); } + /** + * Parses the value of a constraint from the XContentParser stream. + * This method is designed to handle various types of constraint values as per the connector's protocol original specification. + * The constraints can be of type string, number, or list of values. + */ + private static Object parseConstraintValue(XContentParser p) throws IOException { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return p.text(); + } else if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { + return p.numberValue(); + } else if (p.currentToken() == XContentParser.Token.START_ARRAY) { + return p.list(); + } + throw new XContentParseException("Unsupported token [" + p.currentToken() + "]"); + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidationType.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidationType.java index 2118014f4a286..7c064014a95ba 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidationType.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidationType.java @@ -14,8 +14,7 @@ public enum ConfigurationValidationType { GREATER_THAN, LIST_TYPE, INCLUDED_IN, - REGEX, - UNSET; + REGEX; @Override public String toString() { diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorConfigurationTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorConfigurationTests.java index 44d9c0fcf9e76..9b1f9c60d1607 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorConfigurationTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorConfigurationTests.java @@ -85,6 +85,69 @@ public void testToXContent() throws IOException { assertToXContentEquivalent(originalBytes, toXContent(parsed, XContentType.JSON, humanReadable), XContentType.JSON); } + public void testToXContentWithMultipleConstraintTypes() throws IOException { + String content = XContentHelper.stripWhitespace(""" + { + "default_value": null, + "depends_on": [ + { + "field": "some_field", + "value": true + } + ], + "display": "textbox", + "label": "Very important field", + "options": [], 
+ "order": 4, + "required": true, + "sensitive": false, + "tooltip": "Wow, this tooltip is useful.", + "type": "str", + "ui_restrictions": [], + "validations": [ + { + "constraint": 32, + "type": "less_than" + }, + { + "constraint": "^\\\\\\\\d{4}-\\\\\\\\d{2}-\\\\\\\\d{2}$", + "type": "regex" + }, + { + "constraint": "int", + "type": "list_type" + }, + { + "constraint": [ + 1, + 2, + 3 + ], + "type": "included_in" + }, + { + "constraint": [ + "string_1", + "string_2", + "string_3" + ], + "type": "included_in" + } + ], + "value": "" + } + """); + + ConnectorConfiguration configuration = ConnectorConfiguration.fromXContentBytes(new BytesArray(content), XContentType.JSON); + boolean humanReadable = true; + BytesReference originalBytes = toShuffledXContent(configuration, XContentType.JSON, ToXContent.EMPTY_PARAMS, humanReadable); + ConnectorConfiguration parsed; + try (XContentParser parser = createParser(XContentType.JSON.xContent(), originalBytes)) { + parsed = ConnectorConfiguration.fromXContent(parser); + } + assertToXContentEquivalent(originalBytes, toXContent(parsed, XContentType.JSON, humanReadable), XContentType.JSON); + } + private void assertTransportSerialization(ConnectorConfiguration testInstance) throws IOException { ConnectorConfiguration deserializedInstance = copyInstance(testInstance); assertNotSame(testInstance, deserializedInstance); From c77cd975885d832e80090ec99b818eb08ab26624 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 11 Jan 2024 07:26:59 -0500 Subject: [PATCH 37/75] ESQL: Slightly harden rest tests (#104231) This attempts to slightly harder our formatting tests by adding a `SORT` to them. We've seen some failures that look like the data not arring in expected order. And, without a sort, the order is technically undefined. 
Closes #104195 --- .../xpack/esql/qa/rest/RestEsqlTestCase.java | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java index 86ec01b7f5266..100895feade16 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java @@ -233,7 +233,7 @@ public void testColumnarMode() throws IOException { bulkLoadTestData(docCount); boolean columnar = randomBoolean(); - var query = builder().query(fromIndex() + " | keep keyword, integer"); + var query = builder().query(fromIndex() + " | keep keyword, integer | sort integer asc"); if (columnar || randomBoolean()) { query.columnar(columnar); } @@ -263,28 +263,27 @@ public void testColumnarMode() throws IOException { public void testTextMode() throws IOException { int count = randomIntBetween(0, 100); bulkLoadTestData(count); - var builder = builder().query(fromIndex() + " | keep keyword, integer | limit 100"); + var builder = builder().query(fromIndex() + " | keep keyword, integer | sort integer asc | limit 100"); assertEquals(expectedTextBody("txt", count, null), runEsqlAsTextWithFormat(builder, "txt", null)); } public void testCSVMode() throws IOException { int count = randomIntBetween(0, 100); bulkLoadTestData(count); - var builder = builder().query(fromIndex() + " | keep keyword, integer | limit 100"); + var builder = builder().query(fromIndex() + " | keep keyword, integer | sort integer asc | limit 100"); assertEquals(expectedTextBody("csv", count, '|'), runEsqlAsTextWithFormat(builder, "csv", '|')); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104195") public void testTSVMode() throws IOException { int count = randomIntBetween(0, 100); bulkLoadTestData(count); - var builder = builder().query(fromIndex() + " | keep keyword, integer | limit 100"); + var builder = builder().query(fromIndex() + " | keep keyword, integer | sort integer asc | limit 100"); assertEquals(expectedTextBody("tsv", count, null), runEsqlAsTextWithFormat(builder, "tsv", null)); } public void testCSVNoHeaderMode() throws IOException { bulkLoadTestData(1); - var builder = builder().query(fromIndex() + " | keep keyword, integer | limit 100"); + var builder = builder().query(fromIndex() + " | keep keyword, integer | sort integer asc | limit 100"); Request request = prepareRequest(SYNC); String mediaType = attachBody(builder.build(), request); RequestOptions.Builder options = request.getOptions().toBuilder(); From f84bda7c00f628b4a28408d05e64465b54bda5bb Mon Sep 17 00:00:00 2001 From: Nikolaj Volgushev Date: Thu, 11 Jan 2024 13:39:17 +0100 Subject: [PATCH 38/75] Re-build remote cluster connections on credential changes (#103460) This PR builds on https://github.com/elastic/elasticsearch/pull/102798 by adding automatic remote connection rebuilding on cluster credentials changes. In particular, we rebuild a remote cluster connection if a credential for the associated cluster is newly added (i.e., we are moving from RCS 1.0 -> RCS 2.0) or removed (moving from RCS 2.0 -> RCS 1.0). 
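For context, the detection itself boils down to a set difference between the previously loaded credentials and the newly reloaded ones (a simplified sketch with illustrative variable names; the real logic lives in `RemoteClusterCredentialsManager#updateClusterCredentials` in this PR):

```java
// Aliases that newly gained or lost a credential are the only ones whose
// connections must be rebuilt; aliases present in both maps may have a rotated
// secret, but they keep the same transport profile, so they are left untouched.
Set<String> added = Sets.difference(newCredentials.keySet(), previousCredentials.keySet());
Set<String> removed = Sets.difference(previousCredentials.keySet(), newCredentials.keySet());
```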
A connection rebuild allows us to associate the correct profile (`_remote_server` in case of RCS 2.0, or "regular" transport profile for RCS 1.0) without requiring end-users to manually update remote cluster settings via a settings update call. More context on connection rebuilding also in this [comment](https://github.com/elastic/elasticsearch/pull/102798/files#r1420454541). Relates: ES-6764 --- .../RemoteClusterCredentialsManager.java | 38 ++- .../transport/RemoteClusterService.java | 61 +++- .../RemoteClusterCredentialsManagerTests.java | 93 ++++- .../transport/RemoteClusterServiceTests.java | 197 +++++++++++ ...AbstractRemoteClusterSecurityTestCase.java | 63 +++- .../RemoteClusterSecurityCcrMigrationIT.java | 14 +- ...lusterSecurityReloadCredentialsRestIT.java | 239 +++++++++++++ ...teClusterSecurityTransformMigrationIT.java | 12 +- .../ReloadRemoteClusterCredentialsIT.java | 317 ------------------ .../xpack/security/Security.java | 10 +- ...tReloadRemoteClusterCredentialsAction.java | 34 +- 11 files changed, 708 insertions(+), 370 deletions(-) create mode 100644 x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityReloadCredentialsRestIT.java delete mode 100644 x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/ReloadRemoteClusterCredentialsIT.java diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterCredentialsManager.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterCredentialsManager.java index 58e84f5e4ef11..7a6df38addba8 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterCredentialsManager.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterCredentialsManager.java @@ -13,33 +13,41 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.Nullable; +import java.util.Collections; import java.util.Map; +import java.util.Set; import static org.elasticsearch.transport.RemoteClusterService.REMOTE_CLUSTER_CREDENTIALS; public class RemoteClusterCredentialsManager { - private static final Logger logger = LogManager.getLogger(RemoteClusterCredentialsManager.class); - private volatile Map clusterCredentials; + private volatile Map clusterCredentials = Collections.emptyMap(); @SuppressWarnings("this-escape") public RemoteClusterCredentialsManager(Settings settings) { updateClusterCredentials(settings); } - public final void updateClusterCredentials(Settings settings) { - clusterCredentials = REMOTE_CLUSTER_CREDENTIALS.getAsMap(settings); - logger.debug( - () -> Strings.format( - "Updated remote cluster credentials for clusters: [%s]", - Strings.collectionToCommaDelimitedString(clusterCredentials.keySet()) - ) - ); + public final synchronized UpdateRemoteClusterCredentialsResult updateClusterCredentials(Settings settings) { + final Map newClusterCredentials = REMOTE_CLUSTER_CREDENTIALS.getAsMap(settings); + if (clusterCredentials.isEmpty()) { + setClusterCredentialsAndLog(newClusterCredentials); + return new UpdateRemoteClusterCredentialsResult(Set.copyOf(newClusterCredentials.keySet()), Collections.emptySet()); + } + + final Set addedClusterAliases = Sets.difference(newClusterCredentials.keySet(), clusterCredentials.keySet()); + final Set removedClusterAliases = Sets.difference(clusterCredentials.keySet(), newClusterCredentials.keySet()); + 
setClusterCredentialsAndLog(newClusterCredentials); + assert Sets.haveEmptyIntersection(removedClusterAliases, addedClusterAliases); + return new UpdateRemoteClusterCredentialsResult(addedClusterAliases, removedClusterAliases); } + public record UpdateRemoteClusterCredentialsResult(Set addedClusterAliases, Set removedClusterAliases) {} + @Nullable public SecureString resolveCredentials(String clusterAlias) { return clusterCredentials.get(clusterAlias); @@ -49,5 +57,15 @@ public boolean hasCredentials(String clusterAlias) { return clusterCredentials.containsKey(clusterAlias); } + private void setClusterCredentialsAndLog(Map newClusterCredentials) { + clusterCredentials = newClusterCredentials; + logger.debug( + () -> Strings.format( + "Updated remote cluster credentials for clusters: [%s]", + Strings.collectionToCommaDelimitedString(clusterCredentials.keySet()) + ) + ); + } + public static final RemoteClusterCredentialsManager EMPTY = new RemoteClusterCredentialsManager(Settings.EMPTY); } diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java index 6bfbb95cbcfe9..0090010e49e2b 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.support.CountDownActionListener; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.RefCountingRunnable; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeRole; @@ -32,6 +33,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.node.ReportingService; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.RemoteClusterCredentialsManager.UpdateRemoteClusterCredentialsResult; import java.io.Closeable; import java.io.IOException; @@ -48,6 +50,7 @@ import java.util.concurrent.TimeoutException; import java.util.function.BiFunction; import java.util.function.Function; +import java.util.function.Supplier; import java.util.stream.Stream; import static org.elasticsearch.common.settings.Setting.boolSetting; @@ -304,12 +307,53 @@ private synchronized void updateSkipUnavailable(String clusterAlias, Boolean ski } } - public void updateRemoteClusterCredentials(Settings settings) { - remoteClusterCredentialsManager.updateClusterCredentials(settings); + public synchronized void updateRemoteClusterCredentials(Supplier settingsSupplier, ActionListener listener) { + final Settings settings = settingsSupplier.get(); + final UpdateRemoteClusterCredentialsResult result = remoteClusterCredentialsManager.updateClusterCredentials(settings); + // We only need to rebuild connections when a credential was newly added or removed for a cluster alias, not if the credential + // value was updated. 
Therefore, only consider added or removed aliases + final int totalConnectionsToRebuild = result.addedClusterAliases().size() + result.removedClusterAliases().size(); + if (totalConnectionsToRebuild == 0) { + logger.debug("no connection rebuilding required after credentials update"); + listener.onResponse(null); + return; + } + logger.info("rebuilding [{}] connections after credentials update", totalConnectionsToRebuild); + try (var connectionRefs = new RefCountingRunnable(() -> listener.onResponse(null))) { + for (var clusterAlias : result.addedClusterAliases()) { + maybeRebuildConnectionOnCredentialsChange(clusterAlias, settings, connectionRefs); + } + for (var clusterAlias : result.removedClusterAliases()) { + maybeRebuildConnectionOnCredentialsChange(clusterAlias, settings, connectionRefs); + } + } } - public RemoteClusterCredentialsManager getRemoteClusterCredentialsManager() { - return remoteClusterCredentialsManager; + // package-private for testing + + private void maybeRebuildConnectionOnCredentialsChange(String clusterAlias, Settings settings, RefCountingRunnable connectionRefs) { + if (false == remoteClusters.containsKey(clusterAlias)) { + // A credential was added or removed before a remote connection was configured. + // Without an existing connection, there is nothing to rebuild. + logger.info("no connection rebuild required for remote cluster [{}] after credentials change", clusterAlias); + return; + } + + updateRemoteCluster(clusterAlias, settings, true, ActionListener.releaseAfter(new ActionListener<>() { + @Override + public void onResponse(RemoteClusterConnectionStatus status) { + logger.info("remote cluster connection [{}] updated after credentials change: [{}]", clusterAlias, status); + } + + @Override + public void onFailure(Exception e) { + // We don't want to return an error to the upstream listener here since a connection rebuild failure + // does *not* imply a failure to reload secure settings; however, that's how it would surface in the reload-settings call. + // Instead, we log a warning which is also consistent with how we handle remote cluster settings updates (logging instead of + // returning an error) + logger.warn(() -> "failed to update remote cluster connection [" + clusterAlias + "] after credentials change", e); + } + }, connectionRefs.acquire())); } @Override @@ -346,9 +390,14 @@ public void onFailure(Exception e) { * @param newSettings the updated settings for the remote connection * @param listener a listener invoked once every configured cluster has been connected to */ - synchronized void updateRemoteCluster( + void updateRemoteCluster(String clusterAlias, Settings newSettings, ActionListener listener) { + updateRemoteCluster(clusterAlias, newSettings, false, listener); + } + + private synchronized void updateRemoteCluster( String clusterAlias, Settings newSettings, + boolean forceRebuild, ActionListener listener ) { if (LOCAL_CLUSTER_GROUP_KEY.equals(clusterAlias)) { @@ -373,7 +422,7 @@ synchronized void updateRemoteCluster( remote = new RemoteClusterConnection(finalSettings, clusterAlias, transportService, remoteClusterCredentialsManager); remoteClusters.put(clusterAlias, remote); remote.ensureConnected(listener.map(ignored -> RemoteClusterConnectionStatus.CONNECTED)); - } else if (remote.shouldRebuildConnection(newSettings)) { + } else if (forceRebuild || remote.shouldRebuildConnection(newSettings)) { // Changes to connection configuration. 
Must tear down existing connection try { IOUtils.close(remote); diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterCredentialsManagerTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterCredentialsManagerTests.java index f02148a40e47e..b814138f3ed22 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterCredentialsManagerTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterCredentialsManagerTests.java @@ -12,6 +12,8 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -20,18 +22,80 @@ public void testResolveRemoteClusterCredentials() { final String clusterAlias = randomAlphaOfLength(9); final String otherClusterAlias = randomAlphaOfLength(10); - final String secret = randomAlphaOfLength(20); - final Settings settings = buildSettingsWithCredentials(clusterAlias, secret); - RemoteClusterCredentialsManager credentialsManager = new RemoteClusterCredentialsManager(settings); - assertThat(credentialsManager.resolveCredentials(clusterAlias).toString(), equalTo(secret)); - assertThat(credentialsManager.hasCredentials(otherClusterAlias), is(false)); + final RemoteClusterCredentialsManager credentialsManager = new RemoteClusterCredentialsManager(Settings.EMPTY); + { + final String secret = randomAlphaOfLength(20); + final Settings settings = buildSettingsWithCredentials(clusterAlias, secret); + final RemoteClusterCredentialsManager.UpdateRemoteClusterCredentialsResult actual = credentialsManager.updateClusterCredentials( + settings + ); + assertThat(actual.addedClusterAliases(), containsInAnyOrder(clusterAlias)); + assertThat(actual.removedClusterAliases(), is(empty())); + assertThat(credentialsManager.resolveCredentials(clusterAlias).toString(), equalTo(secret)); + assertThat(credentialsManager.hasCredentials(otherClusterAlias), is(false)); + } - final String updatedSecret = randomAlphaOfLength(21); - credentialsManager.updateClusterCredentials(buildSettingsWithCredentials(clusterAlias, updatedSecret)); - assertThat(credentialsManager.resolveCredentials(clusterAlias).toString(), equalTo(updatedSecret)); + { + final String updatedSecret = randomAlphaOfLength(21); + final RemoteClusterCredentialsManager.UpdateRemoteClusterCredentialsResult actual = credentialsManager.updateClusterCredentials( + buildSettingsWithCredentials(clusterAlias, updatedSecret) + ); + assertThat(credentialsManager.resolveCredentials(clusterAlias).toString(), equalTo(updatedSecret)); + assertThat(actual.addedClusterAliases(), is(empty())); + assertThat(actual.removedClusterAliases(), is(empty())); + } - credentialsManager.updateClusterCredentials(Settings.EMPTY); - assertThat(credentialsManager.hasCredentials(clusterAlias), is(false)); + { + final RemoteClusterCredentialsManager.UpdateRemoteClusterCredentialsResult actual = credentialsManager.updateClusterCredentials( + Settings.EMPTY + ); + assertThat(actual.addedClusterAliases(), is(empty())); + assertThat(actual.removedClusterAliases(), containsInAnyOrder(clusterAlias)); + assertThat(credentialsManager.hasCredentials(clusterAlias), is(false)); + } + } + + public void testUpdateRemoteClusterCredentials() { + final String clusterAlias = randomAlphaOfLength(9); + final String otherClusterAlias = randomAlphaOfLength(10); + + final 
RemoteClusterCredentialsManager credentialsManager = new RemoteClusterCredentialsManager(Settings.EMPTY); + + // addition + { + final RemoteClusterCredentialsManager.UpdateRemoteClusterCredentialsResult actual = credentialsManager.updateClusterCredentials( + buildSettingsWithRandomCredentialsForAliases(clusterAlias, otherClusterAlias) + ); + assertThat(actual.addedClusterAliases(), containsInAnyOrder(clusterAlias, otherClusterAlias)); + assertThat(actual.removedClusterAliases(), is(empty())); + } + + // update and removal + { + final RemoteClusterCredentialsManager.UpdateRemoteClusterCredentialsResult actual = credentialsManager.updateClusterCredentials( + buildSettingsWithRandomCredentialsForAliases(clusterAlias) + ); + assertThat(actual.addedClusterAliases(), is(empty())); + assertThat(actual.removedClusterAliases(), containsInAnyOrder(otherClusterAlias)); + } + + // addition and removal + { + final RemoteClusterCredentialsManager.UpdateRemoteClusterCredentialsResult actual = credentialsManager.updateClusterCredentials( + buildSettingsWithRandomCredentialsForAliases(otherClusterAlias) + ); + assertThat(actual.addedClusterAliases(), containsInAnyOrder(otherClusterAlias)); + assertThat(actual.removedClusterAliases(), containsInAnyOrder(clusterAlias)); + } + + // removal + { + final RemoteClusterCredentialsManager.UpdateRemoteClusterCredentialsResult actual = credentialsManager.updateClusterCredentials( + Settings.EMPTY + ); + assertThat(actual.addedClusterAliases(), is(empty())); + assertThat(actual.removedClusterAliases(), containsInAnyOrder(otherClusterAlias)); + } } private Settings buildSettingsWithCredentials(String clusterAlias, String secret) { @@ -40,4 +104,13 @@ private Settings buildSettingsWithCredentials(String clusterAlias, String secret secureSettings.setString("cluster.remote." + clusterAlias + ".credentials", secret); return builder.setSecureSettings(secureSettings).build(); } + + private Settings buildSettingsWithRandomCredentialsForAliases(String... clusterAliases) { + final Settings.Builder builder = Settings.builder(); + final MockSecureSettings secureSettings = new MockSecureSettings(); + for (var alias : clusterAliases) { + secureSettings.setString("cluster.remote." 
+ alias + ".credentials", randomAlphaOfLength(42)); + } + return builder.setSecureSettings(secureSettings).build(); + } } diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java index 1a530a1602b18..29a5d5a34e37f 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java @@ -1426,6 +1426,203 @@ public void testUseDifferentTransportProfileForCredentialsProtectedRemoteCluster } } + public void testUpdateRemoteClusterCredentialsRebuildsConnectionWithCorrectProfile() throws IOException, InterruptedException { + final List knownNodes = new CopyOnWriteArrayList<>(); + try ( + MockTransportService c = startTransport( + "cluster_1", + knownNodes, + VersionInformation.CURRENT, + TransportVersion.current(), + Settings.builder() + .put(RemoteClusterPortSettings.REMOTE_CLUSTER_SERVER_ENABLED.getKey(), "true") + .put(RemoteClusterPortSettings.PORT.getKey(), "0") + .build() + ) + ) { + final DiscoveryNode discoNode = c.getLocalDiscoNode().withTransportAddress(c.boundRemoteAccessAddress().publishAddress()); + try ( + MockTransportService transportService = MockTransportService.createNewService( + Settings.EMPTY, + VersionInformation.CURRENT, + TransportVersion.current(), + threadPool, + null + ) + ) { + transportService.start(); + transportService.acceptIncomingRequests(); + + try (RemoteClusterService service = new RemoteClusterService(Settings.EMPTY, transportService)) { + service.initializeRemoteClusters(); + + final Settings clusterSettings = buildRemoteClusterSettings("cluster_1", discoNode.getAddress().toString()); + final CountDownLatch latch = new CountDownLatch(1); + service.updateRemoteCluster("cluster_1", clusterSettings, connectionListener(latch)); + latch.await(); + + assertConnectionHasProfile(service.getRemoteClusterConnection("cluster_1"), "default"); + + { + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("cluster.remote.cluster_1.credentials", randomAlphaOfLength(10)); + final PlainActionFuture listener = new PlainActionFuture<>(); + final Settings settings = Settings.builder().put(clusterSettings).setSecureSettings(secureSettings).build(); + service.updateRemoteClusterCredentials(() -> settings, listener); + listener.actionGet(10, TimeUnit.SECONDS); + } + + assertConnectionHasProfile( + service.getRemoteClusterConnection("cluster_1"), + RemoteClusterPortSettings.REMOTE_CLUSTER_PROFILE + ); + + { + final PlainActionFuture listener = new PlainActionFuture<>(); + service.updateRemoteClusterCredentials( + // Settings without credentials constitute credentials removal + () -> clusterSettings, + listener + ); + listener.actionGet(10, TimeUnit.SECONDS); + } + + assertConnectionHasProfile(service.getRemoteClusterConnection("cluster_1"), "default"); + } + } + } + } + + public void testUpdateRemoteClusterCredentialsRebuildsMultipleConnectionsDespiteFailures() throws IOException, InterruptedException { + final List knownNodes = new CopyOnWriteArrayList<>(); + try ( + MockTransportService c1 = startTransport( + "cluster_1", + knownNodes, + VersionInformation.CURRENT, + TransportVersion.current(), + Settings.builder() + .put(RemoteClusterPortSettings.REMOTE_CLUSTER_SERVER_ENABLED.getKey(), "true") + .put(RemoteClusterPortSettings.PORT.getKey(), "0") + .build() + ); + MockTransportService c2 = startTransport( + 
"cluster_2", + knownNodes, + VersionInformation.CURRENT, + TransportVersion.current(), + Settings.builder() + .put(RemoteClusterPortSettings.REMOTE_CLUSTER_SERVER_ENABLED.getKey(), "true") + .put(RemoteClusterPortSettings.PORT.getKey(), "0") + .build() + ) + ) { + final DiscoveryNode c1DiscoNode = c1.getLocalDiscoNode().withTransportAddress(c1.boundRemoteAccessAddress().publishAddress()); + final DiscoveryNode c2DiscoNode = c2.getLocalDiscoNode().withTransportAddress(c2.boundRemoteAccessAddress().publishAddress()); + try ( + MockTransportService transportService = MockTransportService.createNewService( + Settings.EMPTY, + VersionInformation.CURRENT, + TransportVersion.current(), + threadPool, + null + ) + ) { + // fail on connection attempt + transportService.addConnectBehavior(c2DiscoNode.getAddress(), (transport, discoveryNode, profile, listener) -> { + throw new RuntimeException("bad cluster"); + }); + + transportService.start(); + transportService.acceptIncomingRequests(); + + final String goodCluster = randomAlphaOfLength(10); + final String badCluster = randomValueOtherThan(goodCluster, () -> randomAlphaOfLength(10)); + final String missingCluster = randomValueOtherThanMany( + alias -> alias.equals(goodCluster) || alias.equals(badCluster), + () -> randomAlphaOfLength(10) + ); + try (RemoteClusterService service = new RemoteClusterService(Settings.EMPTY, transportService)) { + service.initializeRemoteClusters(); + + final Settings cluster1Settings = buildRemoteClusterSettings(goodCluster, c1DiscoNode.getAddress().toString()); + final var latch = new CountDownLatch(1); + service.updateRemoteCluster(goodCluster, cluster1Settings, connectionListener(latch)); + latch.await(); + + final Settings cluster2Settings = buildRemoteClusterSettings(badCluster, c2DiscoNode.getAddress().toString()); + final PlainActionFuture future = new PlainActionFuture<>(); + service.updateRemoteCluster(badCluster, cluster2Settings, future); + final var ex = expectThrows(Exception.class, () -> future.actionGet(10, TimeUnit.SECONDS)); + assertThat(ex.getMessage(), containsString("bad cluster")); + + assertConnectionHasProfile(service.getRemoteClusterConnection(goodCluster), "default"); + assertConnectionHasProfile(service.getRemoteClusterConnection(badCluster), "default"); + expectThrows(NoSuchRemoteClusterException.class, () -> service.getRemoteClusterConnection(missingCluster)); + + { + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("cluster.remote." + badCluster + ".credentials", randomAlphaOfLength(10)); + secureSettings.setString("cluster.remote." + goodCluster + ".credentials", randomAlphaOfLength(10)); + secureSettings.setString("cluster.remote." 
+ missingCluster + ".credentials", randomAlphaOfLength(10)); + final PlainActionFuture listener = new PlainActionFuture<>(); + final Settings settings = Settings.builder() + .put(cluster1Settings) + .put(cluster2Settings) + .setSecureSettings(secureSettings) + .build(); + service.updateRemoteClusterCredentials(() -> settings, listener); + listener.actionGet(10, TimeUnit.SECONDS); + } + + assertConnectionHasProfile( + service.getRemoteClusterConnection(goodCluster), + RemoteClusterPortSettings.REMOTE_CLUSTER_PROFILE + ); + assertConnectionHasProfile( + service.getRemoteClusterConnection(badCluster), + RemoteClusterPortSettings.REMOTE_CLUSTER_PROFILE + ); + expectThrows(NoSuchRemoteClusterException.class, () -> service.getRemoteClusterConnection(missingCluster)); + + { + final PlainActionFuture listener = new PlainActionFuture<>(); + final Settings settings = Settings.builder().put(cluster1Settings).put(cluster2Settings).build(); + service.updateRemoteClusterCredentials( + // Settings without credentials constitute credentials removal + () -> settings, + listener + ); + listener.actionGet(10, TimeUnit.SECONDS); + } + + assertConnectionHasProfile(service.getRemoteClusterConnection(goodCluster), "default"); + assertConnectionHasProfile(service.getRemoteClusterConnection(badCluster), "default"); + expectThrows(NoSuchRemoteClusterException.class, () -> service.getRemoteClusterConnection(missingCluster)); + } + } + } + } + + private static void assertConnectionHasProfile(RemoteClusterConnection remoteClusterConnection, String expectedConnectionProfile) { + assertThat( + remoteClusterConnection.getConnectionManager().getConnectionProfile().getTransportProfile(), + equalTo(expectedConnectionProfile) + ); + } + + private Settings buildRemoteClusterSettings(String clusterAlias, String address) { + final Settings.Builder settings = Settings.builder(); + final boolean proxyMode = randomBoolean(); + if (proxyMode) { + settings.put("cluster.remote." + clusterAlias + ".mode", "proxy") + .put("cluster.remote." + clusterAlias + ".proxy_address", address); + } else { + settings.put("cluster.remote." 
+ clusterAlias + ".seeds", address); + } + return settings.build(); + } + public void testLogsConnectionResult() throws IOException { try ( diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityTestCase.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityTestCase.java index d05c3c85cd07f..2aa96ffc4e443 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityTestCase.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityTestCase.java @@ -21,6 +21,7 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Strings; import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.MutableSettingsProvider; import org.elasticsearch.test.cluster.local.LocalClusterConfigProvider; import org.elasticsearch.test.cluster.util.resource.Resource; import org.elasticsearch.test.rest.ESRestTestCase; @@ -36,8 +37,11 @@ import java.util.Locale; import java.util.Map; +import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; public abstract class AbstractRemoteClusterSecurityTestCase extends ESRestTestCase { @@ -49,8 +53,10 @@ public abstract class AbstractRemoteClusterSecurityTestCase extends ESRestTestCa protected static final String REMOTE_TRANSFORM_USER = "remote_transform_user"; protected static final String REMOTE_SEARCH_ROLE = "remote_search"; protected static final String REMOTE_CLUSTER_ALIAS = "my_remote_cluster"; + private static final String KEYSTORE_PASSWORD = "keystore-password"; protected static LocalClusterConfigProvider commonClusterConfig = cluster -> cluster.module("analysis-common") + .keystorePassword(KEYSTORE_PASSWORD) .setting("xpack.license.self_generated.type", "trial") .setting("xpack.security.enabled", "true") .setting("xpack.security.authc.token.enabled", "true") @@ -191,21 +197,72 @@ protected void configureRemoteCluster( boolean skipUnavailable ) throws Exception { // For configurable remote cluster security, this method assumes the cross cluster access API key is already configured in keystore + putRemoteClusterSettings(clusterAlias, targetFulfillingCluster, basicSecurity, isProxyMode, skipUnavailable); + + // Ensure remote cluster is connected + checkRemoteConnection(clusterAlias, targetFulfillingCluster, basicSecurity, isProxyMode); + } + + protected void configureRemoteClusterCredentials(String clusterAlias, String credentials, MutableSettingsProvider keystoreSettings) + throws IOException { + keystoreSettings.put("cluster.remote." + clusterAlias + ".credentials", credentials); + queryCluster.updateStoredSecureSettings(); + reloadSecureSettings(); + } + + protected void removeRemoteClusterCredentials(String clusterAlias, MutableSettingsProvider keystoreSettings) throws IOException { + keystoreSettings.remove("cluster.remote." 
+ clusterAlias + ".credentials"); + queryCluster.updateStoredSecureSettings(); + reloadSecureSettings(); + } + + @SuppressWarnings("unchecked") + private void reloadSecureSettings() throws IOException { + final Request request = new Request("POST", "/_nodes/reload_secure_settings"); + request.setJsonEntity("{\"secure_settings_password\":\"" + KEYSTORE_PASSWORD + "\"}"); + final Response reloadResponse = adminClient().performRequest(request); + assertOK(reloadResponse); + final Map map = entityAsMap(reloadResponse); + assertThat(map.get("nodes"), instanceOf(Map.class)); + final Map nodes = (Map) map.get("nodes"); + assertThat(nodes, is(not(anEmptyMap()))); + for (Map.Entry entry : nodes.entrySet()) { + assertThat(entry.getValue(), instanceOf(Map.class)); + final Map node = (Map) entry.getValue(); + assertThat(node.get("reload_exception"), nullValue()); + } + } + + protected void putRemoteClusterSettings( + String clusterAlias, + ElasticsearchCluster targetFulfillingCluster, + boolean basicSecurity, + boolean isProxyMode, + boolean skipUnavailable + ) throws IOException { final Settings.Builder builder = Settings.builder(); final String remoteClusterEndpoint = basicSecurity ? targetFulfillingCluster.getTransportEndpoint(0) : targetFulfillingCluster.getRemoteClusterServerEndpoint(0); if (isProxyMode) { builder.put("cluster.remote." + clusterAlias + ".mode", "proxy") - .put("cluster.remote." + clusterAlias + ".proxy_address", remoteClusterEndpoint); + .put("cluster.remote." + clusterAlias + ".proxy_address", remoteClusterEndpoint) + .putNull("cluster.remote." + clusterAlias + ".seeds"); } else { builder.put("cluster.remote." + clusterAlias + ".mode", "sniff") - .putList("cluster.remote." + clusterAlias + ".seeds", remoteClusterEndpoint); + .putList("cluster.remote." + clusterAlias + ".seeds", remoteClusterEndpoint) + .putNull("cluster.remote." + clusterAlias + ".proxy_address"); } builder.put("cluster.remote." 
+ clusterAlias + ".skip_unavailable", skipUnavailable); updateClusterSettings(builder.build()); + } - // Ensure remote cluster is connected + protected void checkRemoteConnection( + String clusterAlias, + ElasticsearchCluster targetFulfillingCluster, + boolean basicSecurity, + boolean isProxyMode + ) throws Exception { final Request remoteInfoRequest = new Request("GET", "/_remote/info"); assertBusy(() -> { final Response remoteInfoResponse = adminClient().performRequest(remoteInfoRequest); diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrMigrationIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrMigrationIT.java index 90e3d9e42b744..fbdc5782a7ddf 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrMigrationIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrMigrationIT.java @@ -162,7 +162,7 @@ public void testRcs1Setup() throws Exception { // First migrate to RCS 2.0 @Order(30) - public void testFollowerClusterRestartForRcs2() throws IOException { + public void testFollowerClusterCredentialsChangeForRcs2() throws IOException { // Update the ccr_user_role so that it is sufficient for both RCS 1.0 and 2.0 final Request putRoleRequest = new Request("POST", "/_security/role/" + CCR_USER_ROLE); putRoleRequest.setJsonEntity(""" @@ -202,9 +202,7 @@ public void testFollowerClusterRestartForRcs2() throws IOException { } ] }"""); - keystoreSettings.put("cluster.remote.my_remote_cluster.credentials", (String) crossClusterAccessApiKey.get("encoded")); - queryCluster.restart(false); - closeClients(); + configureRemoteClusterCredentials("my_remote_cluster", (String) crossClusterAccessApiKey.get("encoded"), keystoreSettings); } @Order(40) @@ -239,7 +237,7 @@ public void testRcs2Setup() throws Exception { // Second migrate back to RCS 1.0 @Order(50) - public void testFollowerClusterRestartAgainForRcs1() throws IOException { + public void testFollowerClusterCredentialsChangeForRcs1() throws IOException { // Remove the RCS 2.0 remote cluster removeRemoteCluster(); @@ -266,9 +264,7 @@ public void testFollowerClusterRestartAgainForRcs1() throws IOException { indexDocsToLeaderCluster("metrics-004", 1); // Remove remote cluster credentials to revert back to RCS 1.0 - keystoreSettings.remove("cluster.remote.my_remote_cluster.credentials"); - queryCluster.restart(false); - closeClients(); + removeRemoteClusterCredentials("my_remote_cluster", keystoreSettings); } @Order(60) @@ -373,7 +369,7 @@ private void verifyReplicatedDocuments(long numberOfDocs, String... 
indices) thr } finally { searchResponse.decRef(); } - }, 30, TimeUnit.SECONDS); + }, 60, TimeUnit.SECONDS); } private void assertFollowerInfo(String followIndexName, String leaderClusterName, String leadIndexName, String status) diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityReloadCredentialsRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityReloadCredentialsRestIT.java new file mode 100644 index 0000000000000..478d3c76f4b36 --- /dev/null +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityReloadCredentialsRestIT.java @@ -0,0 +1,239 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.remotecluster; + +import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; + +import org.apache.lucene.tests.util.TimeUnits; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Response; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.MutableSettingsProvider; +import org.elasticsearch.test.cluster.util.resource.Resource; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.stream.Collectors; + +import static org.hamcrest.Matchers.containsInAnyOrder; + +// account for slow stored secure settings updates (involves removing and re-creating the keystore) +@TimeoutSuite(millis = 10 * TimeUnits.MINUTE) +public class RemoteClusterSecurityReloadCredentialsRestIT extends AbstractRemoteClusterSecurityTestCase { + + private static final MutableSettingsProvider keystoreSettings = new MutableSettingsProvider(); + + static { + fulfillingCluster = ElasticsearchCluster.local() + .name("fulfilling-cluster") + .apply(commonClusterConfig) + .setting("remote_cluster_server.enabled", "true") + .setting("remote_cluster.port", "0") + .setting("xpack.security.remote_cluster_server.ssl.enabled", "true") + .setting("xpack.security.remote_cluster_server.ssl.key", "remote-cluster.key") + .setting("xpack.security.remote_cluster_server.ssl.certificate", "remote-cluster.crt") + .keystore("xpack.security.remote_cluster_server.ssl.secure_key_passphrase", "remote-cluster-password") + .build(); + + queryCluster = ElasticsearchCluster.local() + .name("query-cluster") + .apply(commonClusterConfig) + .setting("xpack.security.remote_cluster_client.ssl.enabled", "true") + .setting("xpack.security.remote_cluster_client.ssl.certificate_authorities", "remote-cluster-ca.crt") + .keystore(keystoreSettings) + .settings((ignored) -> { + // Use an alternative cluster alias to test credential setup when remote cluster settings are configured in + // elasticsearch.yml + final Map settings = new HashMap<>(); + final String 
remoteClusterEndpoint = fulfillingCluster.getRemoteClusterServerEndpoint(0); + final boolean isProxyMode = randomBoolean(); + final String clusterAlias = "my_aliased_remote_cluster"; + if (isProxyMode) { + settings.put("cluster.remote." + clusterAlias + ".mode", "proxy"); + settings.put("cluster.remote." + clusterAlias + ".proxy_address", "\"" + remoteClusterEndpoint + "\""); + } else { + settings.put("cluster.remote." + clusterAlias + ".mode", "sniff"); + settings.put("cluster.remote." + clusterAlias + ".seeds", "[\"" + remoteClusterEndpoint + "\"]"); + } + return settings; + }) + .rolesFile(Resource.fromClasspath("roles.yml")) + .user(REMOTE_SEARCH_USER, PASS.toString(), "read_remote_shared_logs", false) + .build(); + } + + @ClassRule + // Use a RuleChain to ensure that fulfilling cluster is started before query cluster + public static TestRule clusterRule = RuleChain.outerRule(fulfillingCluster).around(queryCluster); + + @Before + public void setup() throws IOException { + indexDocumentsOnFulfillingCluster(); + } + + @After + public void cleanUp() throws IOException { + removeRemoteCluster(); + removeRemoteClusterCredentials("my_remote_cluster", keystoreSettings); + } + + public void testFirstTimeSetupWithElasticsearchSettings() throws Exception { + final Map apiKeyMap = createCrossClusterAccessApiKey(""" + { + "search": [ + { + "names": ["*"] + } + ] + }"""); + configureRemoteClusterCredentials("my_aliased_remote_cluster", (String) apiKeyMap.get("encoded"), keystoreSettings); + assertSharedLogsSearchSuccess("my_aliased_remote_cluster"); + removeRemoteClusterCredentials("my_aliased_remote_cluster", keystoreSettings); + } + + public void testFirstTimeSetup() throws Exception { + configureRcs2(); + assertSharedLogsSearchSuccess("my_remote_cluster"); + } + + public void testUpgradeFromRcs1() throws Exception { + // Setup RCS 1.0 and check that it works + configureRemoteCluster("my_remote_cluster", fulfillingCluster, true, randomBoolean(), randomBoolean()); + final Request putRoleRequest = new Request("POST", "/_security/role/read_remote_shared_logs"); + putRoleRequest.setJsonEntity(""" + { + "indices": [ + { + "names": [ "shared-logs" ], + "privileges": [ "read", "read_cross_cluster" ] + } + ] + }"""); + performRequestAgainstFulfillingCluster(putRoleRequest); + assertSharedLogsSearchSuccess("my_remote_cluster"); + + // Now migrate to RCS 2.0 + // Optionally remove existing cluster definition first. 
In practice removing the cluster definition first is the recommended + // approach since otherwise the reload-secure-settings call may result in WARN logs, but it's functionally possible not to + // remove the definition + if (randomBoolean()) { + removeRemoteCluster(); + } + configureRcs2(); + assertSharedLogsSearchSuccess("my_remote_cluster"); + } + + public void testDowngradeToRcs1() throws Exception { + configureRcs2(); + assertSharedLogsSearchSuccess("my_remote_cluster"); + + if (randomBoolean()) { + removeRemoteCluster(); + } + removeRemoteClusterCredentials("my_remote_cluster", keystoreSettings); + configureRemoteCluster("my_remote_cluster", fulfillingCluster, true, randomBoolean(), randomBoolean()); + final Request putRoleRequest = new Request("POST", "/_security/role/read_remote_shared_logs"); + putRoleRequest.setJsonEntity(""" + { + "indices": [ + { + "names": [ "shared-logs" ], + "privileges": [ "read", "read_cross_cluster" ] + } + ] + }"""); + performRequestAgainstFulfillingCluster(putRoleRequest); + assertSharedLogsSearchSuccess("my_remote_cluster"); + } + + private void removeRemoteCluster() throws IOException { + updateClusterSettings( + Settings.builder() + .putNull("cluster.remote.my_remote_cluster.mode") + .putNull("cluster.remote.my_remote_cluster.skip_unavailable") + .putNull("cluster.remote.my_remote_cluster.proxy_address") + .putNull("cluster.remote.my_remote_cluster.seeds") + .build() + ); + } + + private void configureRcs2() throws Exception { + final Map apiKeyMap = createCrossClusterAccessApiKey(""" + { + "search": [ + { + "names": ["*"] + } + ] + }"""); + final String remoteClusterCredentials = (String) apiKeyMap.get("encoded"); + + final boolean isProxyMode = randomBoolean(); + final boolean configureSettingsFirst = randomBoolean(); + // it's valid to first configure remote cluster, then credentials + if (configureSettingsFirst) { + putRemoteClusterSettings("my_remote_cluster", fulfillingCluster, false, isProxyMode, randomBoolean()); + } + + configureRemoteClusterCredentials("my_remote_cluster", remoteClusterCredentials, keystoreSettings); + + // also valid to configure credentials, then cluster + if (false == configureSettingsFirst) { + configureRemoteCluster("my_remote_cluster"); + } else { + // now that credentials are configured, we expect a successful connection + checkRemoteConnection("my_remote_cluster", fulfillingCluster, false, isProxyMode); + } + } + + private void assertSharedLogsSearchSuccess(String clusterAlias) throws IOException { + final Response response = performRequestWithRemoteSearchUser( + new Request( + "GET", + String.format(Locale.ROOT, "/%s:shared-logs/_search?ccs_minimize_roundtrips=%s", clusterAlias, randomBoolean()) + ) + ); + assertOK(response); + final SearchResponse searchResponse = SearchResponse.fromXContent(responseAsParser(response)); + try { + final List actualIndices = Arrays.stream(searchResponse.getHits().getHits()) + .map(SearchHit::getIndex) + .collect(Collectors.toList()); + assertThat(actualIndices, containsInAnyOrder("shared-logs")); + } finally { + searchResponse.decRef(); + } + } + + private void indexDocumentsOnFulfillingCluster() throws IOException { + final var indexDocRequest = new Request("POST", "/shared-logs/_doc/1?refresh=true"); + indexDocRequest.setJsonEntity("{\"field\": \"1\"}"); + assertOK(performRequestAgainstFulfillingCluster(indexDocRequest)); + } + + private Response performRequestWithRemoteSearchUser(final Request request) throws IOException { + request.setOptions( + 
RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", headerFromRandomAuthMethod(REMOTE_SEARCH_USER, PASS)) + ); + return client().performRequest(request); + } + +} diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityTransformMigrationIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityTransformMigrationIT.java index 39985212bc2fd..0376786225723 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityTransformMigrationIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityTransformMigrationIT.java @@ -156,7 +156,7 @@ public void testRcs1Setup() throws Exception { // First migrate to RCS 2.0 @Order(30) - public void testQueryClusterRestartForRcs2() throws IOException { + public void testQueryClusterCredentialsChangeForRcs2() throws IOException { // Update the transform_user_role so that it is sufficient for both RCS 1.0 and 2.0 final Request putRoleRequest = new Request("POST", "/_security/role/" + TRANSFORM_USER_ROLE); putRoleRequest.setJsonEntity(""" @@ -198,9 +198,7 @@ public void testQueryClusterRestartForRcs2() throws IOException { } ] }"""); - keystoreSettings.put("cluster.remote.my_remote_cluster.credentials", (String) crossClusterAccessApiKey.get("encoded")); - queryCluster.restart(false); - closeClients(); + configureRemoteClusterCredentials("my_remote_cluster", (String) crossClusterAccessApiKey.get("encoded"), keystoreSettings); } @Order(40) @@ -222,7 +220,7 @@ public void testRcs2Setup() throws Exception { // Second migrate back to RCS 1.0 @Order(50) - public void testQueryClusterRestartAgainForRcs1() throws IOException { + public void testQueryClusterCredentialsChangeAgainForRcs1() throws IOException { stopTransform(); // Remove the RCS 2.0 remote cluster @@ -247,9 +245,7 @@ public void testQueryClusterRestartAgainForRcs1() throws IOException { indexSourceDocuments(new UserStars("a", 0)); // Remove remote cluster credentials to revert back to RCS 1.0 - keystoreSettings.remove("cluster.remote.my_remote_cluster.credentials"); - queryCluster.restart(false); - closeClients(); + removeRemoteClusterCredentials("my_remote_cluster", keystoreSettings); } @Order(60) diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/ReloadRemoteClusterCredentialsIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/ReloadRemoteClusterCredentialsIT.java deleted file mode 100644 index 7d91f8994c20a..0000000000000 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/ReloadRemoteClusterCredentialsIT.java +++ /dev/null @@ -1,317 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.security; - -import org.apache.lucene.search.TotalHits; -import org.elasticsearch.TransportVersion; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.node.reload.NodesReloadSecureSettingsRequest; -import org.elasticsearch.action.admin.cluster.node.reload.NodesReloadSecureSettingsResponse; -import org.elasticsearch.action.admin.cluster.node.reload.TransportNodesReloadSecureSettingsAction; -import org.elasticsearch.action.admin.cluster.remote.RemoteClusterNodesAction; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; -import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; -import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; -import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; -import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.SearchShardsRequest; -import org.elasticsearch.action.search.SearchShardsResponse; -import org.elasticsearch.action.search.ShardSearchFailure; -import org.elasticsearch.action.search.TransportSearchAction; -import org.elasticsearch.action.search.TransportSearchShardsAction; -import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.node.VersionInformation; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.KeyStoreWrapper; -import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.env.Environment; -import org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.aggregations.InternalAggregations; -import org.elasticsearch.test.SecuritySingleNodeTestCase; -import org.elasticsearch.test.transport.MockTransportService; -import org.elasticsearch.threadpool.TestThreadPool; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.RemoteClusterCredentialsManager; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.security.authc.ApiKeyService; -import org.elasticsearch.xpack.security.authc.CrossClusterAccessHeaders; -import org.junit.BeforeClass; - -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReference; - -import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasKey; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.not; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; - -public class ReloadRemoteClusterCredentialsIT extends SecuritySingleNodeTestCase { - private static final String CLUSTER_ALIAS = "my_remote_cluster"; - - @BeforeClass - public static void disableInFips() { - assumeFalse( - "Cannot run in FIPS mode since the keystore will be password protected and sending a password in the reload" - + "settings api call, require TLS to be configured for the transport 
layer", - inFipsJvm() - ); - } - - @Override - public String configRoles() { - return org.elasticsearch.core.Strings.format(""" - user: - cluster: [ "ALL" ] - indices: - - names: '*' - privileges: [ "ALL" ] - remote_indices: - - names: '*' - privileges: [ "ALL" ] - clusters: ["*"] - """); - } - - @Override - public void tearDown() throws Exception { - try { - clearRemoteCluster(); - super.tearDown(); - } finally { - ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS); - } - } - - private final ThreadPool threadPool = new TestThreadPool(getClass().getName()); - - public void testReloadRemoteClusterCredentials() throws Exception { - final String credentials = randomAlphaOfLength(42); - writeCredentialsToKeyStore(credentials); - final RemoteClusterCredentialsManager clusterCredentialsManager = getInstanceFromNode(TransportService.class) - .getRemoteClusterService() - .getRemoteClusterCredentialsManager(); - // Until we reload, credentials written to keystore are not loaded into the credentials manager - assertThat(clusterCredentialsManager.hasCredentials(CLUSTER_ALIAS), is(false)); - reloadSecureSettings(); - assertThat(clusterCredentialsManager.resolveCredentials(CLUSTER_ALIAS), equalTo(credentials)); - - // Check that credentials get used for a remote connection, once we configure it - final BlockingQueue> capturedHeaders = ConcurrentCollections.newBlockingQueue(); - try (MockTransportService remoteTransport = startTransport("remoteNodeA", threadPool, capturedHeaders)) { - final TransportAddress remoteAddress = remoteTransport.getOriginalTransport() - .profileBoundAddresses() - .get("_remote_cluster") - .publishAddress(); - - configureRemoteCluster(remoteAddress); - - // Run search to trigger header capturing on the receiving side - client().search(new SearchRequest(CLUSTER_ALIAS + ":index-a")).get().decRef(); - - assertHeadersContainCredentialsThenClear(credentials, capturedHeaders); - - // Update credentials and ensure they are used - final String updatedCredentials = randomAlphaOfLength(41); - writeCredentialsToKeyStore(updatedCredentials); - reloadSecureSettings(); - - client().search(new SearchRequest(CLUSTER_ALIAS + ":index-a")).get().decRef(); - - assertHeadersContainCredentialsThenClear(updatedCredentials, capturedHeaders); - } - } - - private void assertHeadersContainCredentialsThenClear(String credentials, BlockingQueue> capturedHeaders) { - assertThat(capturedHeaders, is(not(empty()))); - for (Map actualHeaders : capturedHeaders) { - assertThat(actualHeaders, hasKey(CrossClusterAccessHeaders.CROSS_CLUSTER_ACCESS_CREDENTIALS_HEADER_KEY)); - assertThat( - actualHeaders.get(CrossClusterAccessHeaders.CROSS_CLUSTER_ACCESS_CREDENTIALS_HEADER_KEY), - equalTo(ApiKeyService.withApiKeyPrefix(credentials)) - ); - } - capturedHeaders.clear(); - assertThat(capturedHeaders, is(empty())); - } - - private void clearRemoteCluster() throws InterruptedException, ExecutionException { - final var builder = Settings.builder() - .putNull("cluster.remote." + CLUSTER_ALIAS + ".mode") - .putNull("cluster.remote." + CLUSTER_ALIAS + ".seeds") - .putNull("cluster.remote." 
+ CLUSTER_ALIAS + ".proxy_address"); - clusterAdmin().updateSettings(new ClusterUpdateSettingsRequest().persistentSettings(builder)).get(); - } - - @Override - protected Settings nodeSettings() { - return Settings.builder().put(super.nodeSettings()).put("xpack.security.remote_cluster_client.ssl.enabled", false).build(); - } - - private void configureRemoteCluster(TransportAddress remoteAddress) throws InterruptedException, ExecutionException { - final Settings.Builder builder = Settings.builder(); - if (randomBoolean()) { - builder.put("cluster.remote." + CLUSTER_ALIAS + ".mode", "sniff") - .put("cluster.remote." + CLUSTER_ALIAS + ".seeds", remoteAddress.toString()) - .putNull("cluster.remote." + CLUSTER_ALIAS + ".proxy_address"); - } else { - builder.put("cluster.remote." + CLUSTER_ALIAS + ".mode", "proxy") - .put("cluster.remote." + CLUSTER_ALIAS + ".proxy_address", remoteAddress.toString()) - .putNull("cluster.remote." + CLUSTER_ALIAS + ".seeds"); - } - clusterAdmin().updateSettings(new ClusterUpdateSettingsRequest().persistentSettings(builder)).get(); - } - - private void writeCredentialsToKeyStore(String credentials) throws Exception { - final Environment environment = getInstanceFromNode(Environment.class); - final KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.create(); - keyStoreWrapper.setString("cluster.remote." + CLUSTER_ALIAS + ".credentials", credentials.toCharArray()); - keyStoreWrapper.save(environment.configFile(), new char[0], false); - } - - public static MockTransportService startTransport( - final String nodeName, - final ThreadPool threadPool, - final BlockingQueue> capturedHeaders - ) { - boolean success = false; - final Settings settings = Settings.builder() - .put("node.name", nodeName) - .put("remote_cluster_server.enabled", "true") - .put("remote_cluster.port", "0") - .put("xpack.security.remote_cluster_server.ssl.enabled", "false") - .build(); - final MockTransportService service = MockTransportService.createNewService( - settings, - VersionInformation.CURRENT, - TransportVersion.current(), - threadPool, - null - ); - try { - service.registerRequestHandler( - ClusterStateAction.NAME, - EsExecutors.DIRECT_EXECUTOR_SERVICE, - ClusterStateRequest::new, - (request, channel, task) -> { - capturedHeaders.add(Map.copyOf(threadPool.getThreadContext().getHeaders())); - channel.sendResponse( - new ClusterStateResponse(ClusterName.DEFAULT, ClusterState.builder(ClusterName.DEFAULT).build(), false) - ); - } - ); - service.registerRequestHandler( - RemoteClusterNodesAction.TYPE.name(), - EsExecutors.DIRECT_EXECUTOR_SERVICE, - RemoteClusterNodesAction.Request::new, - (request, channel, task) -> { - capturedHeaders.add(Map.copyOf(threadPool.getThreadContext().getHeaders())); - channel.sendResponse(new RemoteClusterNodesAction.Response(List.of())); - } - ); - service.registerRequestHandler( - TransportSearchShardsAction.TYPE.name(), - EsExecutors.DIRECT_EXECUTOR_SERVICE, - SearchShardsRequest::new, - (request, channel, task) -> { - capturedHeaders.add(Map.copyOf(threadPool.getThreadContext().getHeaders())); - channel.sendResponse(new SearchShardsResponse(List.of(), List.of(), Collections.emptyMap())); - } - ); - service.registerRequestHandler( - TransportSearchAction.TYPE.name(), - EsExecutors.DIRECT_EXECUTOR_SERVICE, - SearchRequest::new, - (request, channel, task) -> { - capturedHeaders.add(Map.copyOf(threadPool.getThreadContext().getHeaders())); - channel.sendResponse( - new SearchResponse( - SearchHits.empty(new TotalHits(0, TotalHits.Relation.EQUAL_TO), Float.NaN), - 
InternalAggregations.EMPTY, - null, - false, - null, - null, - 1, - null, - 1, - 1, - 0, - 100, - ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY - ) - ); - } - ); - service.start(); - service.acceptIncomingRequests(); - success = true; - return service; - } finally { - if (success == false) { - service.close(); - } - } - } - - private void reloadSecureSettings() { - final AtomicReference reloadSettingsError = new AtomicReference<>(); - final CountDownLatch latch = new CountDownLatch(1); - final SecureString emptyPassword = randomBoolean() ? new SecureString(new char[0]) : null; - - final var request = new NodesReloadSecureSettingsRequest(); - try { - request.nodesIds(Strings.EMPTY_ARRAY); - request.setSecureStorePassword(emptyPassword); - client().execute(TransportNodesReloadSecureSettingsAction.TYPE, request, new ActionListener<>() { - @Override - public void onResponse(NodesReloadSecureSettingsResponse nodesReloadResponse) { - try { - assertThat(nodesReloadResponse, notNullValue()); - final Map nodesMap = nodesReloadResponse.getNodesMap(); - assertThat(nodesMap.size(), equalTo(1)); - for (final NodesReloadSecureSettingsResponse.NodeResponse nodeResponse : nodesReloadResponse.getNodes()) { - assertThat(nodeResponse.reloadException(), nullValue()); - } - } catch (final AssertionError e) { - reloadSettingsError.set(e); - } finally { - latch.countDown(); - } - } - - @Override - public void onFailure(Exception e) { - reloadSettingsError.set(new AssertionError("Nodes request failed", e)); - latch.countDown(); - } - }); - } finally { - request.decRef(); - } - safeAwait(latch); - if (reloadSettingsError.get() != null) { - throw reloadSettingsError.get(); - } - } -} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index a9af4b4ba104a..cc6c659e1478b 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -391,7 +391,6 @@ import java.util.Objects; import java.util.Set; import java.util.concurrent.Executor; -import java.util.concurrent.TimeUnit; import java.util.function.BiConsumer; import java.util.function.Function; import java.util.function.Predicate; @@ -1975,14 +1974,19 @@ private void reloadSharedSecretsForJwtRealms(Settings settingsWithKeystore) { * See {@link TransportReloadRemoteClusterCredentialsAction} for more context. */ private void reloadRemoteClusterCredentials(Settings settingsWithKeystore) { + // Using `settings` instead of `settingsWithKeystore` is deliberate: we are not interested in secure settings here + if (DiscoveryNode.isStateless(settings)) { + // Stateless does not support remote cluster operations. Skip. 
+ return; + } + final PlainActionFuture future = new PlainActionFuture<>(); getClient().execute( ActionTypes.RELOAD_REMOTE_CLUSTER_CREDENTIALS_ACTION, new TransportReloadRemoteClusterCredentialsAction.Request(settingsWithKeystore), future ); - assert future.isDone() : "expecting local-only action call to return immediately on invocation"; - future.actionGet(0, TimeUnit.NANOSECONDS); + future.actionGet(); } static final class ValidateLicenseForFIPS implements BiConsumer { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportReloadRemoteClusterCredentialsAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportReloadRemoteClusterCredentialsAction.java index d6f54e9d3e9e1..22dcf1b4f9daa 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportReloadRemoteClusterCredentialsAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportReloadRemoteClusterCredentialsAction.java @@ -13,16 +13,22 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.RemoteClusterService; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.transport.Transports; import org.elasticsearch.xpack.core.security.action.ActionTypes; import org.elasticsearch.xpack.security.Security; import java.io.IOException; +import java.util.function.Supplier; /** * This is a local-only action which updates remote cluster credentials for remote cluster connections, from keystore settings reloaded via @@ -39,18 +45,38 @@ public class TransportReloadRemoteClusterCredentialsAction extends TransportActi ActionResponse.Empty> { private final RemoteClusterService remoteClusterService; + private final ClusterService clusterService; @Inject - public TransportReloadRemoteClusterCredentialsAction(TransportService transportService, ActionFilters actionFilters) { + public TransportReloadRemoteClusterCredentialsAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters + ) { super(ActionTypes.RELOAD_REMOTE_CLUSTER_CREDENTIALS_ACTION.name(), actionFilters, transportService.getTaskManager()); this.remoteClusterService = transportService.getRemoteClusterService(); + this.clusterService = clusterService; } @Override protected void doExecute(Task task, Request request, ActionListener listener) { - // We avoid stashing and marking context as system to keep the action as minimal as possible (i.e., avoid copying context) - remoteClusterService.updateRemoteClusterCredentials(request.getSettings()); - listener.onResponse(ActionResponse.Empty.INSTANCE); + assert Transports.assertNotTransportThread("Remote connection re-building is too much for a transport thread"); + final ClusterState clusterState = clusterService.state(); + final ClusterBlockException clusterBlockException = checkBlock(clusterState); + if (clusterBlockException 
!= null) { + throw clusterBlockException; + } + // Use a supplier to ensure we resolve cluster settings inside a synchronized block, to prevent race conditions + final Supplier settingsSupplier = () -> { + final Settings persistentSettings = clusterState.metadata().persistentSettings(); + final Settings transientSettings = clusterState.metadata().transientSettings(); + return Settings.builder().put(request.getSettings(), true).put(persistentSettings, false).put(transientSettings, false).build(); + }; + remoteClusterService.updateRemoteClusterCredentials(settingsSupplier, listener.safeMap(ignored -> ActionResponse.Empty.INSTANCE)); + } + + private ClusterBlockException checkBlock(ClusterState clusterState) { + return clusterState.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ); } public static class Request extends ActionRequest { From 016c7783218f06e0ac125f9c9688c6c633513ee1 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Thu, 11 Jan 2024 12:42:22 +0000 Subject: [PATCH 39/75] Remove NamedWriteableRegistry from NodeClient, pass it directly through to rest actions (#103277) --- .../elasticsearch/plugin/noop/NoopPlugin.java | 2 ++ .../datastreams/DataStreamsPlugin.java | 2 ++ .../ingest/common/IngestCommonPlugin.java | 2 ++ .../ingest/geoip/IngestGeoIpPlugin.java | 1 + .../script/mustache/MustachePlugin.java | 4 ++- .../mustache/RestSearchTemplateAction.java | 9 +++++- .../RestSearchTemplateActionTests.java | 10 ++++--- .../painless/PainlessPlugin.java | 2 ++ .../index/rankeval/RankEvalPlugin.java | 1 + .../AbstractBaseReindexRestHandler.java | 11 +++++-- .../elasticsearch/reindex/ReindexPlugin.java | 7 +++-- .../reindex/RestDeleteByQueryAction.java | 7 +++-- .../reindex/RestReindexAction.java | 7 +++-- .../reindex/RestUpdateByQueryAction.java | 7 +++-- .../reindex/RestDeleteByQueryActionTests.java | 10 ++++--- .../reindex/RestReindexActionTests.java | 3 +- .../reindex/RestUpdateByQueryActionTests.java | 10 ++++--- .../rest/root/MainRestPlugin.java | 2 ++ .../elasticsearch/http/SystemIndexRestIT.java | 2 ++ .../http/TestResponseHeaderPlugin.java | 2 ++ .../system/indices/SystemIndicesQA.java | 2 ++ .../coordination/RestHandlerNodesIT.java | 2 ++ .../elasticsearch/action/ActionModule.java | 9 ++++-- .../search/AbstractSearchAsyncAction.java | 6 +++- .../SearchDfsQueryThenFetchAsyncAction.java | 29 ++++++++++--------- .../SearchQueryThenFetchAsyncAction.java | 25 +++++++++------- .../action/search/SearchTransportService.java | 5 ---- .../TransportOpenPointInTimeAction.java | 7 ++++- .../action/search/TransportSearchAction.java | 2 ++ .../client/internal/node/NodeClient.java | 11 +------ .../elasticsearch/node/NodeConstruction.java | 4 +-- .../elasticsearch/plugins/ActionPlugin.java | 11 +++---- .../action/search/RestMultiSearchAction.java | 11 +++---- .../rest/action/search/RestSearchAction.java | 6 ++-- .../action/ActionModuleTests.java | 8 +++++ .../AbstractSearchAsyncActionTests.java | 1 + .../action/search/SearchAsyncActionTests.java | 6 ++++ .../SearchQueryThenFetchAsyncActionTests.java | 4 +++ .../internal/node/NodeClientHeadersTests.java | 11 +------ .../AbstractHttpServerTransportTests.java | 1 + .../indices/cluster/ClusterStateChanges.java | 10 +------ .../indices/RestValidateQueryActionTests.java | 10 +------ .../search/RestMultiSearchActionTests.java | 10 ++++--- .../action/search/RestSearchActionTests.java | 10 ++++--- .../snapshots/SnapshotResiliencyTests.java | 3 +- .../apmintegration/ApmIntegrationPlugin.java | 2 ++ .../diewithdignity/DieWithDignityPlugin.java 
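Before the per-file hunks, a minimal sketch of the widened extension point from a plugin author's perspective; ExamplePlugin is an invented name, the handler construction is only indicated in a comment, and the parameter list mirrors the getRestHandlers overrides changed below.

import java.util.List;
import java.util.function.Supplier;

import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.IndexScopedSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsFilter;
import org.elasticsearch.plugins.ActionPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestHandler;

public class ExamplePlugin extends Plugin implements ActionPlugin {
    @Override
    public List<RestHandler> getRestHandlers(
        Settings settings,
        NamedWriteableRegistry namedWriteableRegistry,   // now injected directly, no longer fetched from NodeClient
        RestController restController,
        ClusterSettings clusterSettings,
        IndexScopedSettings indexScopedSettings,
        SettingsFilter settingsFilter,
        IndexNameExpressionResolver indexNameExpressionResolver,
        Supplier<DiscoveryNodes> nodesInCluster
    ) {
        // Handlers that previously called client.getNamedWriteableRegistry() now receive the
        // registry as a constructor argument, e.g. new RestSearchTemplateAction(namedWriteableRegistry).
        return List.of();
    }
}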
| 2 ++ .../test/seektracker/SeekTrackerPlugin.java | 2 ++ .../AbstractCoordinatorTestCase.java | 3 +- .../test/client/NoOpNodeClient.java | 4 +-- .../xpack/search/AsyncSearch.java | 4 ++- .../search/RestSubmitAsyncSearchAction.java | 14 ++++----- .../RestSubmitAsyncSearchActionTests.java | 4 ++- .../xpack/autoscaling/Autoscaling.java | 1 + .../java/org/elasticsearch/xpack/ccr/Ccr.java | 1 + .../elasticsearch/xpack/core/XPackPlugin.java | 2 ++ .../core/LocalStateCompositeXPackPlugin.java | 3 ++ .../action/RestTermsEnumActionTests.java | 11 +------ .../deprecation/TestDeprecationPlugin.java | 2 ++ .../xpack/deprecation/Deprecation.java | 2 ++ .../xpack/downsample/Downsample.java | 1 + .../xpack/enrich/EnrichPlugin.java | 1 + .../xpack/application/EnterpriseSearch.java | 2 ++ .../LocalStateEnterpriseSearch.java | 3 ++ .../xpack/eql/plugin/EqlPlugin.java | 2 ++ .../xpack/esql/plugin/EsqlPlugin.java | 1 + .../org/elasticsearch/xpack/fleet/Fleet.java | 6 ++-- .../rest/RestFleetMultiSearchAction.java | 11 +++++-- .../fleet/rest/RestFleetSearchAction.java | 14 ++++----- .../xpack/frozen/FrozenIndices.java | 2 ++ .../org/elasticsearch/xpack/graph/Graph.java | 2 ++ .../xpack/idp/IdentityProviderPlugin.java | 2 ++ .../xpack/ilm/IndexLifecycle.java | 2 ++ .../xpack/inference/InferencePlugin.java | 1 + .../xpack/logstash/Logstash.java | 2 ++ .../xpack/ml/MachineLearning.java | 1 + .../xpack/ml/MachineLearningTests.java | 6 ++-- .../xpack/monitoring/Monitoring.java | 2 ++ .../xpack/profiling/ProfilingPlugin.java | 2 ++ .../metering/RepositoriesMeteringPlugin.java | 2 ++ .../elasticsearch/xpack/rollup/Rollup.java | 4 ++- .../rollup/rest/RestRollupSearchAction.java | 9 +++++- .../SearchableSnapshots.java | 2 ++ .../OperatorPrivilegesTestPlugin.java | 2 ++ .../xpack/security/Security.java | 1 + .../xpack/security/SecurityTests.java | 1 + .../xpack/shutdown/ShutdownPlugin.java | 2 ++ .../xpack/slm/SnapshotLifecycle.java | 2 ++ .../testkit/SnapshotRepositoryTestKit.java | 2 ++ .../xpack/sql/plugin/SqlPlugin.java | 1 + .../xpack/sql/plugin/SqlPluginTests.java | 1 + .../textstructure/TextStructurePlugin.java | 2 ++ .../xpack/transform/Transform.java | 2 ++ .../xpack/vectortile/VectorTilePlugin.java | 2 ++ .../elasticsearch/xpack/watcher/Watcher.java | 2 ++ .../xpack/watcher/WatcherPluginTests.java | 2 +- .../plugin/freeze/FreezeIndexPlugin.java | 2 ++ 97 files changed, 297 insertions(+), 169 deletions(-) diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/NoopPlugin.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/NoopPlugin.java index 571efd88aafec..81d6eaf690dbf 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/NoopPlugin.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/NoopPlugin.java @@ -14,6 +14,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; @@ -47,6 +48,7 @@ public class NoopPlugin extends Plugin implements ActionPlugin { @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, 
ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java index 63920ed73bf4a..b69ea170eb476 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java @@ -21,6 +21,7 @@ import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; @@ -230,6 +231,7 @@ public Collection createComponents(PluginServices services) { @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java index d9e7488953b38..dff65f1c7a1bc 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; @@ -83,6 +84,7 @@ public Map getProcessors(Processor.Parameters paramet @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java index 30ecc96a3171c..53c8db638923f 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java @@ -148,6 +148,7 @@ public List> getPersistentTasksExecutor( @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java index b9996484c5bc0..310046cb11a20 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java @@ -13,6 +13,7 @@ import 
org.elasticsearch.action.ActionType; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; @@ -58,6 +59,7 @@ public ScriptEngine getScriptEngine(Settings settings, Collection getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, @@ -66,7 +68,7 @@ public List getRestHandlers( Supplier nodesInCluster ) { return Arrays.asList( - new RestSearchTemplateAction(), + new RestSearchTemplateAction(namedWriteableRegistry), new RestMultiSearchTemplateAction(settings), new RestRenderSearchTemplateAction() ); diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java index 0dbb810902b44..cfd726fd96fc3 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -33,6 +34,12 @@ public class RestSearchTemplateAction extends BaseRestHandler { private static final Set RESPONSE_PARAMS = Set.of(TYPED_KEYS_PARAM, RestSearchAction.TOTAL_HITS_AS_INT_PARAM); + private final NamedWriteableRegistry namedWriteableRegistry; + + public RestSearchTemplateAction(NamedWriteableRegistry namedWriteableRegistry) { + this.namedWriteableRegistry = namedWriteableRegistry; + } + @Override public List routes() { return List.of( @@ -62,7 +69,7 @@ public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client searchRequest, request, null, - client.getNamedWriteableRegistry(), + namedWriteableRegistry, size -> searchRequest.source().size(size) ); diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java index 4e30d87b6a174..4cd14fa97d710 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java @@ -8,27 +8,29 @@ package org.elasticsearch.script.mustache; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.test.rest.RestActionTestCase; import org.junit.Before; -import org.mockito.Mockito; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import static org.mockito.Mockito.mock; + public final class 
RestSearchTemplateActionTests extends RestActionTestCase {
     final List contentTypeHeader = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7));

     @Before
     public void setUpAction() {
-        controller().registerHandler(new RestSearchTemplateAction());
-        verifyingClient.setExecuteVerifier((actionType, request) -> Mockito.mock(SearchTemplateResponse.class));
-        verifyingClient.setExecuteLocallyVerifier((actionType, request) -> Mockito.mock(SearchTemplateResponse.class));
+        controller().registerHandler(new RestSearchTemplateAction(mock(NamedWriteableRegistry.class)));
+        verifyingClient.setExecuteVerifier((actionType, request) -> mock(SearchTemplateResponse.class));
+        verifyingClient.setExecuteLocallyVerifier((actionType, request) -> mock(SearchTemplateResponse.class));
     }

     public void testTypeInPath() {
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java
index 62302331b38d8..f9deddd5f4e85 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java
@@ -13,6 +13,7 @@
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Setting;
@@ -160,6 +161,7 @@ public List> getContexts() {
     @Override
     public List getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalPlugin.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalPlugin.java
index 814abcf02c569..0d421d3d1619c 100644
--- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalPlugin.java
+++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalPlugin.java
@@ -42,6 +42,7 @@ public class RankEvalPlugin extends Plugin implements ActionPlugin {
     @Override
     public List getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBaseReindexRestHandler.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBaseReindexRestHandler.java
index 8e7fab68ac697..48c50450656f3 100644
--- a/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBaseReindexRestHandler.java
+++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBaseReindexRestHandler.java
@@ -39,10 +39,15 @@ protected AbstractBaseReindexRestHandler(A action) {
         this.action = action;
     }

-    protected RestChannelConsumer doPrepareRequest(RestRequest request, NodeClient client, boolean includeCreated, boolean includeUpdated)
-        throws IOException {
+    protected RestChannelConsumer doPrepareRequest(
+        RestRequest request,
+        NamedWriteableRegistry namedWriteableRegistry,
+        NodeClient client,
+        boolean includeCreated,
+        boolean includeUpdated
+    ) throws IOException {
         // Build the internal request
-        Request internal = setCommonOptions(request, buildRequest(request, client.getNamedWriteableRegistry()));
+        Request internal = setCommonOptions(request, buildRequest(request, namedWriteableRegistry));

         // Executes the request and waits for completion
         if (request.paramAsBoolean("wait_for_completion", true)) {
diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexPlugin.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexPlugin.java
index b07eb1b158087..a5cfe8ce3ca11 100644
--- a/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexPlugin.java
+++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexPlugin.java
@@ -67,6 +67,7 @@ public List getNamedWriteables() {
     @Override
     public List getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
@@ -75,9 +76,9 @@ public List getRestHandlers(
         Supplier nodesInCluster
     ) {
         return Arrays.asList(
-            new RestReindexAction(),
-            new RestUpdateByQueryAction(),
-            new RestDeleteByQueryAction(),
+            new RestReindexAction(namedWriteableRegistry),
+            new RestUpdateByQueryAction(namedWriteableRegistry),
+            new RestDeleteByQueryAction(namedWriteableRegistry),
             new RestRethrottleAction(nodesInCluster)
         );
     }
diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestDeleteByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestDeleteByQueryAction.java
index 6f225556144c9..99bd0c51f3084 100644
--- a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestDeleteByQueryAction.java
+++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestDeleteByQueryAction.java
@@ -29,8 +29,11 @@
 @ServerlessScope(Scope.PUBLIC)
 public class RestDeleteByQueryAction extends AbstractBulkByQueryRestHandler {

-    public RestDeleteByQueryAction() {
+    private final NamedWriteableRegistry namedWriteableRegistry;
+
+    public RestDeleteByQueryAction(NamedWriteableRegistry namedWriteableRegistry) {
         super(DeleteByQueryAction.INSTANCE);
+        this.namedWriteableRegistry = namedWriteableRegistry;
     }

     @Override
@@ -51,7 +54,7 @@ public String getName() {

     @Override
     public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
-        return doPrepareRequest(request, client, false, false);
+        return doPrepareRequest(request, namedWriteableRegistry, client, false, false);
     }

     @Override
diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestReindexAction.java
index 66ef3e028280b..44cbe4712455f 100644
--- a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestReindexAction.java
+++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestReindexAction.java
@@ -32,8 +32,11 @@
 @ServerlessScope(Scope.PUBLIC)
 public class RestReindexAction extends AbstractBaseReindexRestHandler implements RestRequestFilter {

-    public RestReindexAction() {
+    private final NamedWriteableRegistry namedWriteableRegistry;
+
+    public RestReindexAction(NamedWriteableRegistry namedWriteableRegistry) {
         super(ReindexAction.INSTANCE);
+        this.namedWriteableRegistry = namedWriteableRegistry;
     }

     @Override
@@ -48,7 +51,7 @@ public String getName() {

     @Override
     public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
-        return doPrepareRequest(request, client, true, true);
+        return doPrepareRequest(request, namedWriteableRegistry, client, true, true);
     }

     @Override
diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestUpdateByQueryAction.java
index 50a2b7de6db39..b99e5acbd411d 100644
--- a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestUpdateByQueryAction.java
+++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestUpdateByQueryAction.java
@@ -30,8 +30,11 @@
 @ServerlessScope(Scope.PUBLIC)
 public class RestUpdateByQueryAction extends AbstractBulkByQueryRestHandler {

-    public RestUpdateByQueryAction() {
+    private final NamedWriteableRegistry namedWriteableRegistry;
+
+    public RestUpdateByQueryAction(NamedWriteableRegistry namedWriteableRegistry) {
         super(UpdateByQueryAction.INSTANCE);
+        this.namedWriteableRegistry = namedWriteableRegistry;
     }

     @Override
@@ -51,7 +54,7 @@ public String getName() {

     @Override
     public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
-        return doPrepareRequest(request, client, false, true);
+        return doPrepareRequest(request, namedWriteableRegistry, client, false, true);
     }

     @Override
diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestDeleteByQueryActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestDeleteByQueryActionTests.java
index fdd98992503d7..241707f6e0f93 100644
--- a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestDeleteByQueryActionTests.java
+++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestDeleteByQueryActionTests.java
@@ -8,6 +8,7 @@

 package org.elasticsearch.reindex;

+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.core.RestApiVersion;
 import org.elasticsearch.index.reindex.BulkByScrollResponse;
 import org.elasticsearch.rest.RestRequest;
@@ -16,22 +17,23 @@
 import org.elasticsearch.test.rest.RestActionTestCase;
 import org.elasticsearch.xcontent.XContentType;
 import org.junit.Before;
-import org.mockito.Mockito;

 import java.io.IOException;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;

+import static org.mockito.Mockito.mock;
+
 public final class RestDeleteByQueryActionTests extends RestActionTestCase {
     final List contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7));

     @Before
     public void setUpAction() {
-        controller().registerHandler(new RestDeleteByQueryAction());
-        verifyingClient.setExecuteVerifier((actionType, request) -> Mockito.mock(BulkByScrollResponse.class));
-        verifyingClient.setExecuteLocallyVerifier((actionType, request) -> Mockito.mock(BulkByScrollResponse.class));
+        controller().registerHandler(new RestDeleteByQueryAction(mock(NamedWriteableRegistry.class)));
+        verifyingClient.setExecuteVerifier((actionType, request) -> mock(BulkByScrollResponse.class));
+        verifyingClient.setExecuteLocallyVerifier((actionType, request) -> mock(BulkByScrollResponse.class));
     }

     public void testTypeInPath() throws IOException {
diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestReindexActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestReindexActionTests.java
index 0df013056dcdd..3484b61ca2c9a 100644
--- a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestReindexActionTests.java
+++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestReindexActionTests.java
@@ -24,6 +24,7 @@
 import java.util.Collections;

 import static java.util.Collections.singletonMap;
+import static org.mockito.Mockito.mock;

 public class RestReindexActionTests extends RestActionTestCase {

@@ -31,7 +32,7 @@ public class RestReindexActionTests extends RestActionTestCase {

     @Before
     public void setUpAction() {
-        action = new RestReindexAction();
+        action = new RestReindexAction(mock(NamedWriteableRegistry.class));
         controller().registerHandler(action);
     }

diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestUpdateByQueryActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestUpdateByQueryActionTests.java
index 889c8d0091c81..83e298c3a235f 100644
--- a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestUpdateByQueryActionTests.java
+++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestUpdateByQueryActionTests.java
@@ -8,6 +8,7 @@

 package org.elasticsearch.reindex;

+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.core.RestApiVersion;
 import org.elasticsearch.index.reindex.BulkByScrollResponse;
 import org.elasticsearch.rest.RestRequest;
@@ -16,22 +17,23 @@
 import org.elasticsearch.test.rest.RestActionTestCase;
 import org.elasticsearch.xcontent.XContentType;
 import org.junit.Before;
-import org.mockito.Mockito;

 import java.io.IOException;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;

+import static org.mockito.Mockito.mock;
+
 public final class RestUpdateByQueryActionTests extends RestActionTestCase {
     final List contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7));

     @Before
     public void setUpAction() {
-        controller().registerHandler(new RestUpdateByQueryAction());
-        verifyingClient.setExecuteVerifier((actionType, request) -> Mockito.mock(BulkByScrollResponse.class));
-        verifyingClient.setExecuteLocallyVerifier((actionType, request) -> Mockito.mock(BulkByScrollResponse.class));
+        controller().registerHandler(new RestUpdateByQueryAction(mock(NamedWriteableRegistry.class)));
+        verifyingClient.setExecuteVerifier((actionType, request) -> mock(BulkByScrollResponse.class));
+        verifyingClient.setExecuteLocallyVerifier((actionType, request) -> mock(BulkByScrollResponse.class));
     }

     public void testTypeInPath() throws IOException {
diff --git a/modules/rest-root/src/main/java/org/elasticsearch/rest/root/MainRestPlugin.java b/modules/rest-root/src/main/java/org/elasticsearch/rest/root/MainRestPlugin.java
index 62063ddab9129..3045681483470 100644
--- a/modules/rest-root/src/main/java/org/elasticsearch/rest/root/MainRestPlugin.java
+++ b/modules/rest-root/src/main/java/org/elasticsearch/rest/root/MainRestPlugin.java
@@ -13,6 +13,7 @@
 import org.elasticsearch.action.ActionType;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Settings;
@@ -32,6 +33,7 @@ public class MainRestPlugin extends Plugin implements ActionPlugin {
     @Override
     public List getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/SystemIndexRestIT.java b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/SystemIndexRestIT.java
index 7eb0a38ad8099..081135d6b1e17 100644
--- a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/SystemIndexRestIT.java
+++ b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/SystemIndexRestIT.java
@@ -20,6 +20,7 @@
 import org.elasticsearch.cluster.metadata.IndexMetadata;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Settings;
@@ -153,6 +154,7 @@ public static class SystemIndexTestPlugin extends Plugin implements SystemIndexP
     @Override
     public List getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/TestResponseHeaderPlugin.java b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/TestResponseHeaderPlugin.java
index 6a1f598c5e529..5c01d0fd430b4 100644
--- a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/TestResponseHeaderPlugin.java
+++ b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/TestResponseHeaderPlugin.java
@@ -10,6 +10,7 @@

 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Settings;
@@ -28,6 +29,7 @@ public class TestResponseHeaderPlugin extends Plugin implements ActionPlugin {
     @Override
     public List getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/qa/system-indices/src/main/java/org/elasticsearch/system/indices/SystemIndicesQA.java b/qa/system-indices/src/main/java/org/elasticsearch/system/indices/SystemIndicesQA.java
index e6325d8bad6f6..971d1bad3e976 100644
--- a/qa/system-indices/src/main/java/org/elasticsearch/system/indices/SystemIndicesQA.java
+++ b/qa/system-indices/src/main/java/org/elasticsearch/system/indices/SystemIndicesQA.java
@@ -15,6 +15,7 @@
 import org.elasticsearch.cluster.metadata.IndexMetadata;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Settings;
@@ -127,6 +128,7 @@ private static XContentBuilder mappings() {
     @Override
     public List getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RestHandlerNodesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RestHandlerNodesIT.java
index 2c15ac9153ba2..efe3b097cae20 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RestHandlerNodesIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RestHandlerNodesIT.java
@@ -10,6 +10,7 @@

 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Settings;
@@ -38,6 +39,7 @@ public static class TestPlugin extends Plugin implements ActionPlugin {
     @Override
     public List getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java
index dd70dc65b853b..e0c803514fe69 100644
--- a/server/src/main/java/org/elasticsearch/action/ActionModule.java
+++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java
@@ -232,6 +232,7 @@
 import org.elasticsearch.common.inject.AbstractModule;
 import org.elasticsearch.common.inject.TypeLiteral;
 import org.elasticsearch.common.inject.multibindings.MapBinder;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Settings;
@@ -446,6 +447,7 @@ public class ActionModule extends AbstractModule {

     private final Settings settings;
     private final IndexNameExpressionResolver indexNameExpressionResolver;
+    private final NamedWriteableRegistry namedWriteableRegistry;
     private final IndexScopedSettings indexScopedSettings;
     private final ClusterSettings clusterSettings;
     private final SettingsFilter settingsFilter;
@@ -466,6 +468,7 @@ public class ActionModule extends AbstractModule {
     public ActionModule(
         Settings settings,
         IndexNameExpressionResolver indexNameExpressionResolver,
+        NamedWriteableRegistry namedWriteableRegistry,
         IndexScopedSettings indexScopedSettings,
         ClusterSettings clusterSettings,
         SettingsFilter settingsFilter,
@@ -483,6 +486,7 @@ public ActionModule(
     ) {
         this.settings = settings;
         this.indexNameExpressionResolver = indexNameExpressionResolver;
+        this.namedWriteableRegistry = namedWriteableRegistry;
         this.indexScopedSettings = indexScopedSettings;
         this.clusterSettings = clusterSettings;
         this.settingsFilter = settingsFilter;
@@ -929,12 +933,12 @@ public void initRestHandlers(Supplier nodesInCluster, Predicate<
         registerHandler.accept(new RestBulkAction(settings));
         registerHandler.accept(new RestUpdateAction());

-        registerHandler.accept(new RestSearchAction(restController.getSearchUsageHolder()));
+        registerHandler.accept(new RestSearchAction(restController.getSearchUsageHolder(), namedWriteableRegistry));
         registerHandler.accept(new RestSearchScrollAction());
         registerHandler.accept(new RestClearScrollAction());
         registerHandler.accept(new RestOpenPointInTimeAction());
         registerHandler.accept(new RestClosePointInTimeAction());
-        registerHandler.accept(new RestMultiSearchAction(settings, restController.getSearchUsageHolder()));
+        registerHandler.accept(new RestMultiSearchAction(settings, restController.getSearchUsageHolder(), namedWriteableRegistry));
         registerHandler.accept(new RestKnnSearchAction());

         registerHandler.accept(new RestValidateQueryAction());
@@ -1007,6 +1011,7 @@ public void initRestHandlers(Supplier nodesInCluster, Predicate<
         for (ActionPlugin plugin : actionPlugins) {
             for (RestHandler handler : plugin.getRestHandlers(
                 settings,
+                namedWriteableRegistry,
                 restController,
                 clusterSettings,
                 indexScopedSettings,
diff --git a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java
index 58ab8169ffb30..591b9a86cda20 100644
--- a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java
+++ b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java
@@ -23,6 +23,7 @@
 import org.elasticsearch.action.support.TransportActions;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.routing.GroupShardsIterator;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.util.concurrent.AbstractRunnable;
 import org.elasticsearch.common.util.concurrent.AtomicArray;
 import org.elasticsearch.core.Releasable;
@@ -66,6 +67,7 @@ abstract class AbstractSearchAsyncAction extends SearchPhase implements SearchPhaseContext {
     private static final float DEFAULT_INDEX_BOOST = 1.0f;
     private final Logger logger;
+    private final NamedWriteableRegistry namedWriteableRegistry;
     private final SearchTransportService searchTransportService;
     private final Executor executor;
     private final ActionListener listener;
@@ -105,6 +107,7 @@ abstract class AbstractSearchAsyncAction exten
     AbstractSearchAsyncAction(
         String name,
         Logger logger,
+        NamedWriteableRegistry namedWriteableRegistry,
         SearchTransportService searchTransportService,
         BiFunction nodeIdToConnection,
         Map aliasFilter,
@@ -121,6 +124,7 @@ abstract class AbstractSearchAsyncAction exten
         SearchResponse.Clusters clusters
     ) {
         super(name);
+        this.namedWriteableRegistry = namedWriteableRegistry;
         final List toSkipIterators = new ArrayList<>();
         final List iterators = new ArrayList<>();
         for (final SearchShardIterator iterator : shardsIts) {
@@ -647,7 +651,7 @@ public OriginalIndices getOriginalIndices(int shardIndex) {
     public boolean isPartOfPointInTime(ShardSearchContextId contextId) {
         final PointInTimeBuilder pointInTimeBuilder = request.pointInTimeBuilder();
         if (pointInTimeBuilder != null) {
-            return request.pointInTimeBuilder().getSearchContextId(searchTransportService.getNamedWriteableRegistry()).contains(contextId);
+            return request.pointInTimeBuilder().getSearchContextId(namedWriteableRegistry).contains(contextId);
         } else {
             return false;
         }
diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java
index 68d1bec590318..6fcfc97c33c9e 100644
--- a/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java
+++ b/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java
@@ -12,6 +12,7 @@
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.routing.GroupShardsIterator;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.search.SearchPhaseResult;
 import org.elasticsearch.search.SearchShardTarget;
 import org.elasticsearch.search.dfs.AggregatedDfs;
@@ -31,24 +32,26 @@ final class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction
     private final SearchProgressListener progressListener;

     SearchDfsQueryThenFetchAsyncAction(
-        final Logger logger,
-        final SearchTransportService searchTransportService,
-        final BiFunction nodeIdToConnection,
-        final Map aliasFilter,
-        final Map concreteIndexBoosts,
-        final Executor executor,
-        final SearchPhaseResults queryPhaseResultConsumer,
-        final SearchRequest request,
-        final ActionListener listener,
-        final GroupShardsIterator shardsIts,
-        final TransportSearchAction.SearchTimeProvider timeProvider,
-        final ClusterState clusterState,
-        final SearchTask task,
+        Logger logger,
+        NamedWriteableRegistry namedWriteableRegistry,
+        SearchTransportService searchTransportService,
+        BiFunction nodeIdToConnection,
+        Map aliasFilter,
+        Map concreteIndexBoosts,
+        Executor executor,
+        SearchPhaseResults queryPhaseResultConsumer,
+        SearchRequest request,
+        ActionListener listener,
+        GroupShardsIterator shardsIts,
+        TransportSearchAction.SearchTimeProvider timeProvider,
+        ClusterState clusterState,
+        SearchTask task,
         SearchResponse.Clusters clusters
     ) {
         super(
             "dfs",
             logger,
+            namedWriteableRegistry,
             searchTransportService,
             nodeIdToConnection,
             aliasFilter,
diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java
index 51d330f55aee1..3ad7c52567d14 100644
--- a/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java
+++ b/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java
@@ -13,6 +13,7 @@
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.routing.GroupShardsIterator;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.search.SearchPhaseResult;
 import org.elasticsearch.search.SearchShardTarget;
 import org.elasticsearch.search.internal.AliasFilter;
@@ -37,17 +38,18 @@ class SearchQueryThenFetchAsyncAction extends AbstractSearchAsyncAction
     SearchQueryThenFetchAsyncAction(
-        final Logger logger,
-        final SearchTransportService searchTransportService,
-        final BiFunction nodeIdToConnection,
-        final Map aliasFilter,
-        final Map concreteIndexBoosts,
-        final Executor executor,
-        final SearchPhaseResults resultConsumer,
-        final SearchRequest request,
-        final ActionListener listener,
-        final GroupShardsIterator shardsIts,
-        final TransportSearchAction.SearchTimeProvider timeProvider,
+        Logger logger,
+        NamedWriteableRegistry namedWriteableRegistry,
+        SearchTransportService searchTransportService,
+        BiFunction nodeIdToConnection,
+        Map aliasFilter,
+        Map concreteIndexBoosts,
+        Executor executor,
+        SearchPhaseResults resultConsumer,
+        SearchRequest request,
+        ActionListener listener,
+        GroupShardsIterator shardsIts,
+        TransportSearchAction.SearchTimeProvider timeProvider,
         ClusterState clusterState,
         SearchTask task,
         SearchResponse.Clusters clusters
@@ -55,6 +57,7 @@ class SearchQueryThenFetchAsyncAction extends AbstractSearchAsyncAction(
             actionName,
             logger,
+            namedWriteableRegistry,
             searchTransportService,
             connectionLookup,
             aliasFilter,
diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java
index 4e9aed5f643f2..06e9b8fa51319 100644
--- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java
+++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java
@@ -1307,6 +1307,7 @@ public SearchPhase newSearchPhase(
             if (searchRequest.searchType() == DFS_QUERY_THEN_FETCH) {
                 return new SearchDfsQueryThenFetchAsyncAction(
                     logger,
+                    namedWriteableRegistry,
                     searchTransportService,
                     connectionLookup,
                     aliasFilter,
@@ -1325,6 +1326,7 @@ public SearchPhase newSearchPhase(
                 assert searchRequest.searchType() == QUERY_THEN_FETCH : searchRequest.searchType();
                 return new SearchQueryThenFetchAsyncAction(
                     logger,
+                    namedWriteableRegistry,
                     searchTransportService,
                     connectionLookup,
                     aliasFilter,
diff --git a/server/src/main/java/org/elasticsearch/client/internal/node/NodeClient.java b/server/src/main/java/org/elasticsearch/client/internal/node/NodeClient.java
index f75997d92b678..881e4dd6f5492 100644
--- a/server/src/main/java/org/elasticsearch/client/internal/node/NodeClient.java
+++ b/server/src/main/java/org/elasticsearch/client/internal/node/NodeClient.java
@@ -16,7 +16,6 @@
 import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.client.internal.support.AbstractClient;
 import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.tasks.TaskCancelledException;
@@ -46,7 +45,6 @@ public class NodeClient extends AbstractClient {
     private Supplier localNodeId;
     private Transport.Connection localConnection;
     private RemoteClusterService remoteClusterService;
-    private NamedWriteableRegistry namedWriteableRegistry;

     public NodeClient(Settings settings, ThreadPool threadPool) {
         super(settings, threadPool);
@@ -57,15 +55,13 @@ public void initialize(
         TaskManager taskManager,
         Supplier localNodeId,
         Transport.Connection localConnection,
-        RemoteClusterService remoteClusterService,
-        NamedWriteableRegistry namedWriteableRegistry
+        RemoteClusterService remoteClusterService
     ) {
         this.actions = actions;
         this.taskManager = taskManager;
         this.localNodeId = localNodeId;
         this.localConnection = localConnection;
         this.remoteClusterService = remoteClusterService;
-        this.namedWriteableRegistry = namedWriteableRegistry;
     }

     /**
@@ -143,9 +139,4 @@ private Transpo
     public Client getRemoteClusterClient(String clusterAlias, Executor responseExecutor) {
         return remoteClusterService.getRemoteClusterClient(threadPool(), clusterAlias, responseExecutor, true);
     }
-
-    public NamedWriteableRegistry getNamedWriteableRegistry() {
-        return namedWriteableRegistry;
-    }
-
 }
diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java
index 018abebdb7709..aa62ea689a5a9 100644
--- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java
+++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java
@@ -796,6 +796,7 @@ record PluginServiceInstances(
         ActionModule actionModule = new ActionModule(
             settings,
             clusterModule.getIndexNameExpressionResolver(),
+            namedWriteableRegistry,
             settingsModule.getIndexScopedSettings(),
             settingsModule.getClusterSettings(),
             settingsModule.getSettingsFilter(),
@@ -1249,8 +1250,7 @@ private void postInjection(
             transportService.getTaskManager(),
             () -> clusterService.localNode().getId(),
             transportService.getLocalNodeConnection(),
-            transportService.getRemoteClusterService(),
-            namedWriteableRegistry
+            transportService.getRemoteClusterService()
         );

         logger.debug("initializing HTTP handlers ...");
diff --git a/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java b/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java
index 095eec2811edc..18e21094fc11d 100644
--- a/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java
+++ b/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java
@@ -18,6 +18,7 @@
 import org.elasticsearch.action.support.TransportAction;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Settings;
@@ -28,7 +29,6 @@

 import java.util.Collection;
 import java.util.Collections;
-import java.util.List;
 import java.util.Objects;
 import java.util.function.Supplier;

@@ -37,7 +37,7 @@
  * {@code
  *   {@literal @}Override
  *   public List> getActions() {
- *       return Arrays.asList(new ActionHandler<>(ReindexAction.INSTANCE, TransportReindexAction.class),
+ *       return List.of(new ActionHandler<>(ReindexAction.INSTANCE, TransportReindexAction.class),
  *               new ActionHandler<>(UpdateByQueryAction.INSTANCE, TransportUpdateByQueryAction.class),
  *               new ActionHandler<>(DeleteByQueryAction.INSTANCE, TransportDeleteByQueryAction.class),
  *               new ActionHandler<>(RethrottleAction.INSTANCE, TransportRethrottleAction.class));
@@ -48,22 +48,23 @@ public interface ActionPlugin {
     /**
      * Actions added by this plugin.
      */
-    default List> getActions() {
+    default Collection> getActions() {
         return Collections.emptyList();
     }
 
     /**
      * ActionType filters added by this plugin.
      */
-    default List getActionFilters() {
+    default Collection getActionFilters() {
         return Collections.emptyList();
     }
 
     /**
      * Rest handlers added by this plugin.
      */
-    default List getRestHandlers(
+    default Collection getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
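For third-party plugins, the practical effect of the interface change above is that the NamedWriteableRegistry now arrives as an argument to getRestHandlers instead of being looked up through NodeClient at request time. Below is a minimal sketch of a plugin adopting the new signature; ExamplePlugin and ExampleRestHandler are hypothetical names used only for illustration, and the trailing parameters are assumed to match the rest of the interface as it stands in this change.

    public class ExamplePlugin extends Plugin implements ActionPlugin {
        @Override
        public Collection<RestHandler> getRestHandlers(
            Settings settings,
            NamedWriteableRegistry namedWriteableRegistry,
            RestController restController,
            ClusterSettings clusterSettings,
            IndexScopedSettings indexScopedSettings,
            SettingsFilter settingsFilter,
            IndexNameExpressionResolver indexNameExpressionResolver,
            Supplier<DiscoveryNodes> nodesInCluster
        ) {
            // The registry is injected up front, so handlers that need to deserialize
            // named writeables (for example point-in-time ids) can take it as a constructor argument.
            return List.of(new ExampleRestHandler(namedWriteableRegistry));
        }
    }

The same constructor-injection shape is what the reindex and search handlers in this patch switch to, which is why their no-argument constructors gain a NamedWriteableRegistry parameter.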
diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java
index a881b2497b26c..66e7f8cdcbc62 100644
--- a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java
+++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java
@@ -49,10 +49,12 @@ public class RestMultiSearchAction extends BaseRestHandler {
 
     private final boolean allowExplicitIndex;
     private final SearchUsageHolder searchUsageHolder;
+    private final NamedWriteableRegistry namedWriteableRegistry;
 
-    public RestMultiSearchAction(Settings settings, SearchUsageHolder searchUsageHolder) {
+    public RestMultiSearchAction(Settings settings, SearchUsageHolder searchUsageHolder, NamedWriteableRegistry namedWriteableRegistry) {
         this.allowExplicitIndex = MULTI_ALLOW_EXPLICIT_INDEX.get(settings);
         this.searchUsageHolder = searchUsageHolder;
+        this.namedWriteableRegistry = namedWriteableRegistry;
     }
 
     @Override
@@ -74,12 +76,7 @@ public String getName() {
 
     @Override
     public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
-        final MultiSearchRequest multiSearchRequest = parseRequest(
-            request,
-            client.getNamedWriteableRegistry(),
-            allowExplicitIndex,
-            searchUsageHolder
-        );
+        final MultiSearchRequest multiSearchRequest = parseRequest(request, namedWriteableRegistry, allowExplicitIndex, searchUsageHolder);
         return channel -> {
             final RestCancellableNodeClient cancellableClient = new RestCancellableNodeClient(client, request.getHttpChannel());
             cancellableClient.execute(
diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java
index 711aec182525e..067cf2d800957 100644
--- a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java
+++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java
@@ -69,9 +69,11 @@ public class RestSearchAction extends BaseRestHandler {
     public static final Set RESPONSE_PARAMS = Set.of(TYPED_KEYS_PARAM, TOTAL_HITS_AS_INT_PARAM, INCLUDE_NAMED_QUERIES_SCORE_PARAM);
 
     private final SearchUsageHolder searchUsageHolder;
+    private final NamedWriteableRegistry namedWriteableRegistry;
 
-    public RestSearchAction(SearchUsageHolder searchUsageHolder) {
+    public RestSearchAction(SearchUsageHolder searchUsageHolder, NamedWriteableRegistry namedWriteableRegistry) {
         this.searchUsageHolder = searchUsageHolder;
+        this.namedWriteableRegistry = namedWriteableRegistry;
     }
 
     @Override
@@ -114,7 +116,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC
          */
         IntConsumer setSize = size -> searchRequest.source().size(size);
         request.withContentOrSourceParamParserOrNull(
-            parser -> parseSearchRequest(searchRequest, request, parser, client.getNamedWriteableRegistry(), setSize, searchUsageHolder)
+            parser -> parseSearchRequest(searchRequest, request, parser, namedWriteableRegistry, setSize, searchUsageHolder)
         );
 
         return channel -> {
diff --git a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java
index a076537bb7351..e1a9062f91d17 100644
--- a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java
+++ b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java
@@ -15,6 +15,7 @@
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Settings;
@@ -110,6 +111,7 @@ public void testSetupRestHandlerContainsKnownBuiltin() {
         ActionModule actionModule = new ActionModule(
             settings.getSettings(),
             TestIndexNameExpressionResolver.newInstance(),
+            null,
             settings.getIndexScopedSettings(),
             settings.getClusterSettings(),
             settings.getSettingsFilter(),
@@ -147,6 +149,7 @@ public void testPluginCantOverwriteBuiltinRestHandler() throws IOException {
             @Override
             public List getRestHandlers(
                 Settings settings,
+                NamedWriteableRegistry namedWriteableRegistry,
                 RestController restController,
                 ClusterSettings clusterSettings,
                 IndexScopedSettings indexScopedSettings,
@@ -171,6 +174,7 @@ public String getName() {
             ActionModule actionModule = new ActionModule(
                 settings.getSettings(),
                 TestIndexNameExpressionResolver.newInstance(threadPool.getThreadContext()),
+                null,
                 settings.getIndexScopedSettings(),
                 settings.getClusterSettings(),
                 settings.getSettingsFilter(),
@@ -207,6 +211,7 @@ public void handleRequest(RestRequest request, RestChannel channel, NodeClient c
             @Override
             public List getRestHandlers(
                 Settings settings,
+                NamedWriteableRegistry namedWriteableRegistry,
                 RestController restController,
                 ClusterSettings clusterSettings,
                 IndexScopedSettings indexScopedSettings,
@@ -225,6 +230,7 @@ public List getRestHandlers(
             ActionModule actionModule = new ActionModule(
                 settings.getSettings(),
                 TestIndexNameExpressionResolver.newInstance(threadPool.getThreadContext()),
+                null,
                 settings.getIndexScopedSettings(),
                 settings.getClusterSettings(),
                 settings.getSettingsFilter(),
@@ -274,6 +280,7 @@ public void test3rdPartyHandlerIsNotInstalled() {
                 () -> new ActionModule(
                     settingsModule.getSettings(),
                     TestIndexNameExpressionResolver.newInstance(threadPool.getThreadContext()),
+                    null,
                     settingsModule.getIndexScopedSettings(),
                     settingsModule.getClusterSettings(),
                     settingsModule.getSettingsFilter(),
@@ -314,6 +321,7 @@ public void test3rdPartyRestControllerIsNotInstalled() {
                 () -> new ActionModule(
                     settingsModule.getSettings(),
                     TestIndexNameExpressionResolver.newInstance(threadPool.getThreadContext()),
+                    null,
                     settingsModule.getIndexScopedSettings(),
                     settingsModule.getClusterSettings(),
                     settingsModule.getSettingsFilter(),
diff --git a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java
index bd6171e353add..0288a5b92c772 100644
--- a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java
@@ -75,6 +75,7 @@ private AbstractSearchAsyncAction createAction(
             "test",
             logger,
             null,
+            null,
             nodeIdToConnection,
             Collections.singletonMap("foo", AliasFilter.of(new MatchAllQueryBuilder())),
             Collections.singletonMap("foo", 2.0f),
diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java
index d6b1bd8057708..30e634314e0ba 100644
--- a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java
@@ -95,6 +95,7 @@ public void testSkipSearchShards() throws InterruptedException {
         AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction(
             "test",
             logger,
+            null,
             transportService,
             (cluster, node) -> {
                 assert cluster == null : "cluster was not null: " + cluster;
@@ -203,6 +204,7 @@ public void testLimitConcurrentShardRequests() throws InterruptedException {
             AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction<>(
                 "test",
                 logger,
+                null,
                 transportService,
                 (cluster, node) -> {
                     assert cluster == null : "cluster was not null: " + cluster;
@@ -318,6 +320,7 @@ public void sendFreeContext(Transport.Connection connection, ShardSearchContextI
             AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction<>(
                 "test",
                 logger,
+                null,
                 transportService,
                 (cluster, node) -> {
                     assert cluster == null : "cluster was not null: " + cluster;
@@ -443,6 +446,7 @@ public void sendFreeContext(Transport.Connection connection, ShardSearchContextI
             AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction<>(
                 "test",
                 logger,
+                null,
                 transportService,
                 (cluster, node) -> {
                     assert cluster == null : "cluster was not null: " + cluster;
@@ -551,6 +555,7 @@ public void testAllowPartialResults() throws InterruptedException {
             AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction<>(
                 "test",
                 logger,
+                null,
                 transportService,
                 (cluster, node) -> {
                     assert cluster == null : "cluster was not null: " + cluster;
@@ -652,6 +657,7 @@ public void testSkipUnavailableSearchShards() throws InterruptedException {
         AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction<>(
             "test",
             logger,
+            null,
             new SearchTransportService(null, null, null),
             (cluster, node) -> {
                 assert cluster == null : "cluster was not null: " + cluster;
diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java
index a973fa20851db..760070979077d 100644
--- a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java
@@ -200,6 +200,7 @@ public void sendExecuteQuery(
         try {
             SearchQueryThenFetchAsyncAction action = new SearchQueryThenFetchAsyncAction(
                 logger,
+                null,
                 searchTransportService,
                 (clusterAlias, node) -> lookup.get(node),
                 Collections.singletonMap("_na_", AliasFilter.EMPTY),
@@ -351,6 +352,7 @@ private void testMixedVersionsShardsSearch(VersionInformation oldVersion, Versio
         final List responses = new ArrayList<>();
         SearchQueryThenFetchAsyncAction newSearchAsyncAction = new SearchQueryThenFetchAsyncAction(
             logger,
+            null,
             searchTransportService,
             (clusterAlias, node) -> lookup.get(node),
             Collections.singletonMap("_na_", AliasFilter.EMPTY),
@@ -499,6 +501,7 @@ public void sendExecuteQuery(
         CountDownLatch latch = new CountDownLatch(1);
         SearchQueryThenFetchAsyncAction action = new SearchQueryThenFetchAsyncAction(
             logger,
+            null,
             searchTransportService,
             (clusterAlias, node) -> lookup.get(node),
             Collections.singletonMap("_na_", AliasFilter.EMPTY),
@@ -648,6 +651,7 @@ public void sendExecuteQuery(
         CountDownLatch latch = new CountDownLatch(1);
         SearchQueryThenFetchAsyncAction action = new SearchQueryThenFetchAsyncAction(
             logger,
+            null,
             searchTransportService,
             (clusterAlias, node) -> lookup.get(node),
             Collections.singletonMap("_na_", AliasFilter.EMPTY),
diff --git a/server/src/test/java/org/elasticsearch/client/internal/node/NodeClientHeadersTests.java b/server/src/test/java/org/elasticsearch/client/internal/node/NodeClientHeadersTests.java
index c051b8314d7b3..9aea310180410 100644
--- a/server/src/test/java/org/elasticsearch/client/internal/node/NodeClientHeadersTests.java
+++ b/server/src/test/java/org/elasticsearch/client/internal/node/NodeClientHeadersTests.java
@@ -16,7 +16,6 @@
 import org.elasticsearch.action.support.TransportAction;
 import org.elasticsearch.client.internal.AbstractClientHeadersTestCase;
 import org.elasticsearch.client.internal.Client;
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.tasks.TaskManager;
@@ -24,7 +23,6 @@
 
 import java.util.Collections;
 import java.util.HashMap;
-import java.util.List;
 
 import static org.mockito.Mockito.mock;
 
@@ -38,14 +36,7 @@ protected Client buildClient(Settings headersSettings, ActionType[] testedAct
         TaskManager taskManager = new TaskManager(settings, threadPool, Collections.emptySet());
         Actions actions = new Actions(testedActions, taskManager);
         NodeClient client = new NodeClient(settings, threadPool);
-        client.initialize(
-            actions,
-            taskManager,
-            () -> "test",
-            mock(Transport.Connection.class),
-            null,
-            new NamedWriteableRegistry(List.of())
-        );
+        client.initialize(actions, taskManager, () -> "test", mock(Transport.Connection.class), null);
         return client;
     }
 
diff --git a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java
index 4f25e00f8c083..6c1e1293e099a 100644
--- a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java
+++ b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java
@@ -1143,6 +1143,7 @@ public Collection getRestHeaders() {
         return new ActionModule(
             settings.getSettings(),
             TestIndexNameExpressionResolver.newInstance(threadPool.getThreadContext()),
+            null,
             settings.getIndexScopedSettings(),
             settings.getClusterSettings(),
             settings.getSettingsFilter(),
diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java
index 13ecc0841ba55..ca7dd2683f211 100644
--- a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java
+++ b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java
@@ -74,7 +74,6 @@
 import org.elasticsearch.cluster.service.MasterService;
 import org.elasticsearch.cluster.version.CompatibilityVersions;
 import org.elasticsearch.common.UUIDs;
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Settings;
@@ -269,14 +268,7 @@ public IndexMetadata verifyIndexMetadata(IndexMetadata indexMetadata, IndexVersi
                 actionFilters
             )
         );
-        client.initialize(
-            actions,
-            transportService.getTaskManager(),
-            null,
-            transportService.getLocalNodeConnection(),
-            null,
-            new NamedWriteableRegistry(List.of())
-        );
+        client.initialize(actions, transportService.getTaskManager(), null, transportService.getLocalNodeConnection(), null);
 
         ShardLimitValidator shardLimitValidator = new ShardLimitValidator(SETTINGS, clusterService);
         MetadataIndexStateService indexStateService = new MetadataIndexStateService(
diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java
index 7000f1a153ac6..761d2b454b134 100644
--- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java
+++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java
@@ -16,7 +16,6 @@
 import org.elasticsearch.action.support.TransportAction;
 import org.elasticsearch.client.internal.node.NodeClient;
 import org.elasticsearch.common.bytes.BytesArray;
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.core.RestApiVersion;
@@ -79,14 +78,7 @@ protected void doExecute(Task task, ActionRequest request, ActionListener();
         actions.put(ValidateQueryAction.INSTANCE, transportAction);
 
-        client.initialize(
-            actions,
-            taskManager,
-            () -> "local",
-            mock(Transport.Connection.class),
-            null,
-            new NamedWriteableRegistry(List.of())
-        );
+        client.initialize(actions, taskManager, () -> "local", mock(Transport.Connection.class), null);
         controller.registerHandler(action);
     }
 
diff --git a/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java
index 6fadb71652163..7ad935744680f 100644
--- a/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java
+++ b/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java
@@ -10,6 +10,7 @@
 
 import org.elasticsearch.action.search.MultiSearchResponse;
 import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.core.RestApiVersion;
 import org.elasticsearch.rest.RestRequest;
@@ -18,13 +19,14 @@
 import org.elasticsearch.usage.UsageService;
 import org.elasticsearch.xcontent.XContentType;
 import org.junit.Before;
-import org.mockito.Mockito;
 
 import java.nio.charset.StandardCharsets;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 
+import static org.mockito.Mockito.mock;
+
 public final class RestMultiSearchActionTests extends RestActionTestCase {
     final List contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7));
 
@@ -32,10 +34,10 @@ public final class RestMultiSearchActionTests extends RestActionTestCase {
 
     @Before
     public void setUpAction() {
-        action = new RestMultiSearchAction(Settings.EMPTY, new UsageService().getSearchUsageHolder());
+        action = new RestMultiSearchAction(Settings.EMPTY, new UsageService().getSearchUsageHolder(), mock(NamedWriteableRegistry.class));
         controller().registerHandler(action);
-        verifyingClient.setExecuteVerifier((actionType, request) -> Mockito.mock(MultiSearchResponse.class));
-        verifyingClient.setExecuteLocallyVerifier((actionType, request) -> Mockito.mock(MultiSearchResponse.class));
+        verifyingClient.setExecuteVerifier((actionType, request) -> mock(MultiSearchResponse.class));
+        verifyingClient.setExecuteLocallyVerifier((actionType, request) -> mock(MultiSearchResponse.class));
     }
 
     public void testTypeInPath() {
diff --git a/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java
index 6d0480048982d..6c1a234b32cd9 100644
--- a/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java
+++ b/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java
@@ -10,6 +10,7 @@
 import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.search.SearchType;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.core.RestApiVersion;
 import org.elasticsearch.rest.RestRequest;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
@@ -21,13 +22,14 @@
 import org.elasticsearch.test.rest.RestActionTestCase;
 import org.elasticsearch.usage.UsageService;
 import org.junit.Before;
-import org.mockito.Mockito;
 
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import static org.mockito.Mockito.mock;
+
 public final class RestSearchActionTests extends RestActionTestCase {
     final List contentTypeHeader = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7));
 
@@ -35,10 +37,10 @@ public final class RestSearchActionTests extends RestActionTestCase {
 
     @Before
     public void setUpAction() {
-        action = new RestSearchAction(new UsageService().getSearchUsageHolder());
+        action = new RestSearchAction(new UsageService().getSearchUsageHolder(), mock(NamedWriteableRegistry.class));
         controller().registerHandler(action);
-        verifyingClient.setExecuteVerifier((actionType, request) -> Mockito.mock(SearchResponse.class));
-        verifyingClient.setExecuteLocallyVerifier((actionType, request) -> Mockito.mock(SearchResponse.class));
+        verifyingClient.setExecuteVerifier((actionType, request) -> mock(SearchResponse.class));
+        verifyingClient.setExecuteLocallyVerifier((actionType, request) -> mock(SearchResponse.class));
     }
 
     public void testTypeInPath() {
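Because the registry is now an explicit constructor argument, handlers like this one can also be built in tests without Mockito at all when no named writeables actually need resolving. A hedged sketch of that alternative, using only constructors that already appear elsewhere in this patch (an empty NamedWriteableRegistry and the UsageService search-usage holder), would look like this inside a RestActionTestCase setup method:

    // Hypothetical alternative to mock(NamedWriteableRegistry.class): an empty but real registry.
    NamedWriteableRegistry emptyRegistry = new NamedWriteableRegistry(List.of());
    RestSearchAction action = new RestSearchAction(new UsageService().getSearchUsageHolder(), emptyRegistry);
    controller().registerHandler(action);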
diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java
index 1df74c787eec4..97275f7305b20 100644
--- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java
+++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java
@@ -2138,8 +2138,7 @@ protected void assertSnapshotOrGenericThread() {
                     transportService.getTaskManager(),
                     () -> clusterService.localNode().getId(),
                     transportService.getLocalNodeConnection(),
-                    transportService.getRemoteClusterService(),
-                    new NamedWriteableRegistry(List.of())
+                    transportService.getRemoteClusterService()
                 );
             }
 
diff --git a/test/external-modules/apm-integration/src/main/java/org/elasticsearch/test/apmintegration/ApmIntegrationPlugin.java b/test/external-modules/apm-integration/src/main/java/org/elasticsearch/test/apmintegration/ApmIntegrationPlugin.java
index 6afd5120c17fe..7ecdf253364f4 100644
--- a/test/external-modules/apm-integration/src/main/java/org/elasticsearch/test/apmintegration/ApmIntegrationPlugin.java
+++ b/test/external-modules/apm-integration/src/main/java/org/elasticsearch/test/apmintegration/ApmIntegrationPlugin.java
@@ -10,6 +10,7 @@
 
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Settings;
@@ -30,6 +31,7 @@ public class ApmIntegrationPlugin extends Plugin implements ActionPlugin {
     @Override
     public List getRestHandlers(
         final Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         final RestController restController,
         final ClusterSettings clusterSettings,
         final IndexScopedSettings indexScopedSettings,
diff --git a/test/external-modules/die-with-dignity/src/main/java/org/elasticsearch/test/diewithdignity/DieWithDignityPlugin.java b/test/external-modules/die-with-dignity/src/main/java/org/elasticsearch/test/diewithdignity/DieWithDignityPlugin.java
index 96e48b1c3fd58..c974551fbbc15 100644
--- a/test/external-modules/die-with-dignity/src/main/java/org/elasticsearch/test/diewithdignity/DieWithDignityPlugin.java
+++ b/test/external-modules/die-with-dignity/src/main/java/org/elasticsearch/test/diewithdignity/DieWithDignityPlugin.java
@@ -10,6 +10,7 @@
 
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Settings;
@@ -28,6 +29,7 @@ public class DieWithDignityPlugin extends Plugin implements ActionPlugin {
     @Override
     public List getRestHandlers(
         final Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         final RestController restController,
         final ClusterSettings clusterSettings,
         final IndexScopedSettings indexScopedSettings,
diff --git a/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekTrackerPlugin.java b/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekTrackerPlugin.java
index aa9ff52b00824..5ecc9c9deb2bf 100644
--- a/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekTrackerPlugin.java
+++ b/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekTrackerPlugin.java
@@ -13,6 +13,7 @@
 import org.elasticsearch.action.ActionType;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Setting;
@@ -70,6 +71,7 @@ public void onIndexModule(IndexModule indexModule) {
     @Override
     public List getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java
index 611f2ab9f5749..1d76c1e40910e 100644
--- a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java
@@ -1181,8 +1181,7 @@ public RecyclerBytesStreamOutput newNetworkBytesStream() {
                     transportService.getTaskManager(),
                     localNode::getId,
                     transportService.getLocalNodeConnection(),
-                    null,
-                    getNamedWriteableRegistry()
+                    null
                 );
                 stableMasterHealthIndicatorService = new StableMasterHealthIndicatorService(coordinationDiagnosticsService, clusterService);
                 masterService.setClusterStatePublisher(coordinator);
diff --git a/test/framework/src/main/java/org/elasticsearch/test/client/NoOpNodeClient.java b/test/framework/src/main/java/org/elasticsearch/test/client/NoOpNodeClient.java
index 766c9176c6846..a59bb020226aa 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/client/NoOpNodeClient.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/client/NoOpNodeClient.java
@@ -15,7 +15,6 @@
 import org.elasticsearch.action.support.TransportAction;
 import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.client.internal.node.NodeClient;
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.tasks.TaskManager;
@@ -58,8 +57,7 @@ public void initialize(
         TaskManager taskManager,
         Supplier localNodeId,
         Transport.Connection localConnection,
-        RemoteClusterService remoteClusterService,
-        NamedWriteableRegistry namedWriteableRegistry
+        RemoteClusterService remoteClusterService
     ) {
         throw new UnsupportedOperationException("cannot initialize " + this.getClass().getSimpleName());
     }
diff --git a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearch.java b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearch.java
index e15f9781a069f..195d00169840a 100644
--- a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearch.java
+++ b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearch.java
@@ -10,6 +10,7 @@
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Setting;
@@ -44,6 +45,7 @@ public final class AsyncSearch extends Plugin implements ActionPlugin {
     @Override
     public List getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
@@ -52,7 +54,7 @@ public List getRestHandlers(
         Supplier nodesInCluster
     ) {
         return Arrays.asList(
-            new RestSubmitAsyncSearchAction(restController.getSearchUsageHolder()),
+            new RestSubmitAsyncSearchAction(restController.getSearchUsageHolder(), namedWriteableRegistry),
             new RestGetAsyncSearchAction(),
             new RestGetAsyncStatusAction(),
             new RestDeleteAsyncSearchAction()
diff --git a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchAction.java b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchAction.java
index 8f554d4d8705c..f88207343bd60 100644
--- a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchAction.java
+++ b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchAction.java
@@ -7,6 +7,7 @@
 package org.elasticsearch.xpack.search;
 
 import org.elasticsearch.client.internal.node.NodeClient;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.rest.BaseRestHandler;
 import org.elasticsearch.rest.RestRequest;
 import org.elasticsearch.rest.RestStatus;
@@ -34,9 +35,11 @@ public final class RestSubmitAsyncSearchAction extends BaseRestHandler {
     static final Set RESPONSE_PARAMS = Collections.singleton(TYPED_KEYS_PARAM);
 
     private final SearchUsageHolder searchUsageHolder;
+    private final NamedWriteableRegistry namedWriteableRegistry;
 
-    public RestSubmitAsyncSearchAction(SearchUsageHolder searchUsageHolder) {
+    public RestSubmitAsyncSearchAction(SearchUsageHolder searchUsageHolder, NamedWriteableRegistry namedWriteableRegistry) {
         this.searchUsageHolder = searchUsageHolder;
+        this.namedWriteableRegistry = namedWriteableRegistry;
     }
 
     @Override
@@ -58,14 +61,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli
         // them as supported. We rely on SubmitAsyncSearchRequest#validate to fail in case they are set.
         // Note that ccs_minimize_roundtrips is also set this way, which is a supported option.
         request.withContentOrSourceParamParserOrNull(
-            parser -> parseSearchRequest(
-                submit.getSearchRequest(),
-                request,
-                parser,
-                client.getNamedWriteableRegistry(),
-                setSize,
-                searchUsageHolder
-            )
+            parser -> parseSearchRequest(submit.getSearchRequest(), request, parser, namedWriteableRegistry, setSize, searchUsageHolder)
         );
 
         if (request.hasParam("wait_for_completion_timeout")) {
diff --git a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchActionTests.java b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchActionTests.java
index 57cab31e7aaaf..0130746ab1702 100644
--- a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchActionTests.java
+++ b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchActionTests.java
@@ -8,6 +8,7 @@
 
 import org.apache.lucene.util.SetOnce;
 import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.rest.RestRequest;
@@ -26,6 +27,7 @@
 
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.instanceOf;
+import static org.mockito.Mockito.mock;
 
 public class RestSubmitAsyncSearchActionTests extends RestActionTestCase {
 
@@ -33,7 +35,7 @@ public class RestSubmitAsyncSearchActionTests extends RestActionTestCase {
 
     @Before
     public void setUpAction() {
-        action = new RestSubmitAsyncSearchAction(new UsageService().getSearchUsageHolder());
+        action = new RestSubmitAsyncSearchAction(new UsageService().getSearchUsageHolder(), mock(NamedWriteableRegistry.class));
         controller().registerHandler(action);
     }
 
diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/Autoscaling.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/Autoscaling.java
index e9d54826436c2..60220391a2165 100644
--- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/Autoscaling.java
+++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/Autoscaling.java
@@ -130,6 +131,7 @@ public List<Setting<?>> getSettings() {
     @Override
     public List getRestHandlers(
         final Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         final RestController controller,
         final ClusterSettings clusterSettings,
         final IndexScopedSettings indexScopedSettings,
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java
index 7234b7babffdc..7e0e2d1493417 100644
--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java
+++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java
@@ -257,6 +257,7 @@ public List<PersistentTasksExecutor<?>> getPersistentTasksExecutor(
 
     public List getRestHandlers(
         Settings unused,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java
index 5960c4c6f79d2..31c772f96f889 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java
@@ -21,6 +21,7 @@
 import org.elasticsearch.cluster.routing.allocation.DataTier;
 import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider;
 import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.logging.DeprecationCategory;
 import org.elasticsearch.common.logging.DeprecationLogger;
 import org.elasticsearch.common.settings.ClusterSettings;
@@ -381,6 +382,7 @@ protected Class>
     @Override
     public List getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java
index a383004c12878..510b98c07a60f 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java
@@ -232,6 +232,7 @@ public List getActionFilters() {
     @Override
     public List getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
@@ -242,6 +243,7 @@ public List getRestHandlers(
         List handlers = new ArrayList<>(
             super.getRestHandlers(
                 settings,
+                namedWriteableRegistry,
                 restController,
                 clusterSettings,
                 indexScopedSettings,
@@ -254,6 +256,7 @@ public List getRestHandlers(
             p -> handlers.addAll(
                 p.getRestHandlers(
                     settings,
+                    namedWriteableRegistry,
                     restController,
                     clusterSettings,
                     indexScopedSettings,
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/action/RestTermsEnumActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/action/RestTermsEnumActionTests.java
index d4500d9439329..b0ad137f0f1b6 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/action/RestTermsEnumActionTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/action/RestTermsEnumActionTests.java
@@ -14,7 +14,6 @@
 import org.elasticsearch.action.support.TransportAction;
 import org.elasticsearch.client.internal.node.NodeClient;
 import org.elasticsearch.common.bytes.BytesArray;
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
 import org.elasticsearch.rest.RestController;
@@ -38,7 +37,6 @@
 
 import java.util.Collections;
 import java.util.HashMap;
-import java.util.List;
 import java.util.Map;
 
 import static java.util.Collections.emptyMap;
@@ -77,14 +75,7 @@ protected void doExecute(Task task, ActionRequest request, ActionListener<ActionResponse> listener) {
         final Map<ActionType<? extends ActionResponse>, TransportAction<? extends ActionRequest, ? extends ActionResponse>> actions = new HashMap<>();
         actions.put(TermsEnumAction.INSTANCE, transportAction);
 
-        client.initialize(
-            actions,
-            taskManager,
-            () -> "local",
-            mock(Transport.Connection.class),
-            null,
-            new NamedWriteableRegistry(List.of())
-        );
+        client.initialize(actions, taskManager, () -> "local", mock(Transport.Connection.class), null);
         controller.registerHandler(action);
     }
 
diff --git a/x-pack/plugin/deprecation/qa/rest/src/main/java/org/elasticsearch/xpack/deprecation/TestDeprecationPlugin.java b/x-pack/plugin/deprecation/qa/rest/src/main/java/org/elasticsearch/xpack/deprecation/TestDeprecationPlugin.java
index 01c449645fa99..8080761983136 100644
--- a/x-pack/plugin/deprecation/qa/rest/src/main/java/org/elasticsearch/xpack/deprecation/TestDeprecationPlugin.java
+++ b/x-pack/plugin/deprecation/qa/rest/src/main/java/org/elasticsearch/xpack/deprecation/TestDeprecationPlugin.java
@@ -8,6 +8,7 @@
 
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Setting;
@@ -34,6 +35,7 @@ public class TestDeprecationPlugin extends Plugin implements ActionPlugin, Searc
     @Override
     public List getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/Deprecation.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/Deprecation.java
index dd060653a4f34..329370929ec53 100644
--- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/Deprecation.java
+++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/Deprecation.java
@@ -10,6 +10,7 @@
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.logging.RateLimitingFilter;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
@@ -63,6 +64,7 @@ public class Deprecation extends Plugin implements ActionPlugin {
     @Override
     public List getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/Downsample.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/Downsample.java
index 84dfb4169eb92..260782a3eb0f3 100644
--- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/Downsample.java
+++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/Downsample.java
@@ -75,6 +75,7 @@ public List<ExecutorBuilder<?>> getExecutorBuilders(Settings settings) {
     @Override
     public List getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPlugin.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPlugin.java
index 8e0c96c6ee245..e9a075227107c 100644
--- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPlugin.java
+++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPlugin.java
@@ -161,6 +161,7 @@ protected XPackLicenseState getLicenseState() {
     @Override
     public List getRestHandlers(
         Settings unused,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java
index 73025c0b23b56..4c28579483152 100644
--- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java
@@ -11,6 +11,7 @@
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Setting;
@@ -257,6 +258,7 @@ protected XPackLicenseState getLicenseState() {
     @Override
     public List getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/LocalStateEnterpriseSearch.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/LocalStateEnterpriseSearch.java
index 2e181fda1ef88..67c918dac94c9 100644
--- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/LocalStateEnterpriseSearch.java
+++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/LocalStateEnterpriseSearch.java
@@ -11,6 +11,7 @@
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Setting;
@@ -70,6 +71,7 @@ protected XPackLicenseState getLicenseState() {
     @Override
     public List getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
@@ -79,6 +81,7 @@ public List getRestHandlers(
     ) {
         return entSearchPlugin.getRestHandlers(
             settings,
+            namedWriteableRegistry,
             restController,
             clusterSettings,
             indexScopedSettings,
diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/EqlPlugin.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/EqlPlugin.java
index 881cb083a48f2..fe21051b4063e 100644
--- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/EqlPlugin.java
+++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/EqlPlugin.java
@@ -14,6 +14,7 @@
 import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.breaker.CircuitBreaker;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Setting;
@@ -101,6 +102,7 @@ public List<Setting<?>> getSettings() {
     @Override
     public List getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java
index 07ca55aa665eb..aba1f5cfd6b40 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java
@@ -138,6 +138,7 @@ public List<Setting<?>> getSettings() {
     @Override
     public List getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java
index fb8d68541d4d1..270786b1f82e7 100644
--- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java
+++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java
@@ -24,6 +24,7 @@
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Settings;
@@ -354,6 +355,7 @@ private static String loadTemplateSource(String resource, int mappingsVersion) {
     @Override
     public List getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
@@ -363,8 +365,8 @@ public List getRestHandlers(
     ) {
         return List.of(
             new RestGetGlobalCheckpointsAction(),
-            new RestFleetSearchAction(restController.getSearchUsageHolder()),
-            new RestFleetMultiSearchAction(settings, restController.getSearchUsageHolder()),
+            new RestFleetSearchAction(restController.getSearchUsageHolder(), namedWriteableRegistry),
+            new RestFleetMultiSearchAction(settings, restController.getSearchUsageHolder(), namedWriteableRegistry),
             new RestGetSecretsAction(),
             new RestPostSecretsAction(),
             new RestDeleteSecretsAction()
diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetMultiSearchAction.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetMultiSearchAction.java
index c177bea2e63ca..5e7ef365b6592 100644
--- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetMultiSearchAction.java
+++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetMultiSearchAction.java
@@ -11,6 +11,7 @@
 import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.action.search.TransportMultiSearchAction;
 import org.elasticsearch.client.internal.node.NodeClient;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.rest.BaseRestHandler;
@@ -40,10 +41,16 @@ public class RestFleetMultiSearchAction extends BaseRestHandler {
 
     private final boolean allowExplicitIndex;
     private final SearchUsageHolder searchUsageHolder;
+    private final NamedWriteableRegistry namedWriteableRegistry;
 
-    public RestFleetMultiSearchAction(Settings settings, SearchUsageHolder searchUsageHolder) {
+    public RestFleetMultiSearchAction(
+        Settings settings,
+        SearchUsageHolder searchUsageHolder,
+        NamedWriteableRegistry namedWriteableRegistry
+    ) {
         this.allowExplicitIndex = MULTI_ALLOW_EXPLICIT_INDEX.get(settings);
         this.searchUsageHolder = searchUsageHolder;
+        this.namedWriteableRegistry = namedWriteableRegistry;
     }
 
     @Override
@@ -65,7 +72,7 @@ public List routes() {
     protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
         final MultiSearchRequest multiSearchRequest = RestMultiSearchAction.parseRequest(
             request,
-            client.getNamedWriteableRegistry(),
+            namedWriteableRegistry,
             allowExplicitIndex,
             searchUsageHolder,
             (key, value, searchRequest) -> {
diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetSearchAction.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetSearchAction.java
index 73af65b2f31a6..ce606fdd17363 100644
--- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetSearchAction.java
+++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetSearchAction.java
@@ -12,6 +12,7 @@
 import org.elasticsearch.action.search.TransportSearchAction;
 import org.elasticsearch.client.internal.node.NodeClient;
 import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.rest.BaseRestHandler;
 import org.elasticsearch.rest.RestRequest;
@@ -36,9 +37,11 @@
 public class RestFleetSearchAction extends BaseRestHandler {
 
     private final SearchUsageHolder searchUsageHolder;
+    private final NamedWriteableRegistry namedWriteableRegistry;
 
-    public RestFleetSearchAction(SearchUsageHolder searchUsageHolder) {
+    public RestFleetSearchAction(SearchUsageHolder searchUsageHolder, NamedWriteableRegistry namedWriteableRegistry) {
         this.searchUsageHolder = searchUsageHolder;
+        this.namedWriteableRegistry = namedWriteableRegistry;
     }
 
     @Override
@@ -68,14 +71,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli
 
         IntConsumer setSize = size -> searchRequest.source().size(size);
         request.withContentOrSourceParamParserOrNull(parser -> {
-            RestSearchAction.parseSearchRequest(
-                searchRequest,
-                request,
-                parser,
-                client.getNamedWriteableRegistry(),
-                setSize,
-                searchUsageHolder
-            );
+            RestSearchAction.parseSearchRequest(searchRequest, request, parser, namedWriteableRegistry, setSize, searchUsageHolder);
             String[] stringWaitForCheckpoints = request.paramAsStringArray("wait_for_checkpoints", Strings.EMPTY_ARRAY);
             final long[] waitForCheckpoints = new long[stringWaitForCheckpoints.length];
             for (int i = 0; i < stringWaitForCheckpoints.length; ++i) {
diff --git a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/FrozenIndices.java b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/FrozenIndices.java
index cf727d93702bb..8931669c53ce8 100644
--- a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/FrozenIndices.java
+++ b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/FrozenIndices.java
@@ -10,6 +10,7 @@
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Setting;
@@ -64,6 +65,7 @@ public List<Setting<?>> getSettings() {
     @Override
     public List getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/Graph.java b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/Graph.java
index 4e9502c073d2a..c64b5ada48c3f 100644
--- a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/Graph.java
+++ b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/Graph.java
@@ -10,6 +10,7 @@
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Settings;
@@ -57,6 +58,7 @@ public Graph(Settings settings) {
     @Override
     public List getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/IdentityProviderPlugin.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/IdentityProviderPlugin.java
index 7a279e367a0e8..a61e4c4e1c69e 100644
--- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/IdentityProviderPlugin.java
+++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/IdentityProviderPlugin.java
@@ -11,6 +11,7 @@
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Setting;
@@ -128,6 +129,7 @@ public Collection createComponents(PluginServices services) {
     @Override
     public List getRestHandlers(
         Settings unused,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java
index 1b52486f2b5ea..e013eb1520f29 100644
--- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java
+++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java
@@ -14,6 +14,7 @@
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.metadata.Metadata;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Setting;
@@ -248,6 +249,7 @@ private static List xContentEntries() {
     @Override
     public List getRestHandlers(
         Settings unused,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java
index 33d71c65ed643..4b3da18008eac 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java
@@ -97,6 +97,7 @@ public InferencePlugin(Settings settings) {
     @Override
     public List getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/Logstash.java b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/Logstash.java
index 204bebcff4499..b6215b4efe5ba 100644
--- a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/Logstash.java
+++ b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/Logstash.java
@@ -13,6 +13,7 @@
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.metadata.IndexTemplateMetadata;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Settings;
@@ -78,6 +79,7 @@ public Logstash() {}
     @Override
     public List getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java
index f3254245168b8..61835c4838110 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java
@@ -1348,6 +1348,7 @@ public List<PersistentTasksExecutor<?>> getPersistentTasksExecutor(
     @Override
     public List getRestHandlers(
         Settings unused,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningTests.java
index f5f81a5ca15f3..a47849e33f578 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningTests.java
@@ -223,7 +223,7 @@ public void testAnomalyDetectionOnly() throws IOException {
         Settings settings = Settings.builder().put("path.home", createTempDir()).build();
         MlTestExtensionLoader loader = new MlTestExtensionLoader(new MlTestExtension(false, false, true, false, false, false));
         try (MachineLearning machineLearning = createTrialLicensedMachineLearning(settings, loader)) {
-            List restHandlers = machineLearning.getRestHandlers(settings, null, null, null, null, null, null);
+            List restHandlers = machineLearning.getRestHandlers(settings, null, null, null, null, null, null, null);
             assertThat(restHandlers, hasItem(instanceOf(RestMlInfoAction.class)));
             assertThat(restHandlers, hasItem(instanceOf(RestGetJobsAction.class)));
             assertThat(restHandlers, not(hasItem(instanceOf(RestGetTrainedModelsAction.class))));
@@ -243,7 +243,7 @@ public void testDataFrameAnalyticsOnly() throws IOException {
         Settings settings = Settings.builder().put("path.home", createTempDir()).build();
         MlTestExtensionLoader loader = new MlTestExtensionLoader(new MlTestExtension(false, false, false, true, false, false));
         try (MachineLearning machineLearning = createTrialLicensedMachineLearning(settings, loader)) {
-            List restHandlers = machineLearning.getRestHandlers(settings, null, null, null, null, null, null);
+            List restHandlers = machineLearning.getRestHandlers(settings, null, null, null, null, null, null, null);
             assertThat(restHandlers, hasItem(instanceOf(RestMlInfoAction.class)));
             assertThat(restHandlers, not(hasItem(instanceOf(RestGetJobsAction.class))));
             assertThat(restHandlers, hasItem(instanceOf(RestGetTrainedModelsAction.class)));
@@ -263,7 +263,7 @@ public void testNlpOnly() throws IOException {
         Settings settings = Settings.builder().put("path.home", createTempDir()).build();
         MlTestExtensionLoader loader = new MlTestExtensionLoader(new MlTestExtension(false, false, false, false, true, false));
         try (MachineLearning machineLearning = createTrialLicensedMachineLearning(settings, loader)) {
-            List restHandlers = machineLearning.getRestHandlers(settings, null, null, null, null, null, null);
+            List restHandlers = machineLearning.getRestHandlers(settings, null, null, null, null, null, null, null);
             assertThat(restHandlers, hasItem(instanceOf(RestMlInfoAction.class)));
             assertThat(restHandlers, not(hasItem(instanceOf(RestGetJobsAction.class))));
             assertThat(restHandlers, hasItem(instanceOf(RestGetTrainedModelsAction.class)));
diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java
index bab8e5b22c37a..92d46e54ea1cc 100644
--- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java
+++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java
@@ -13,6 +13,7 @@
 import org.elasticsearch.cluster.metadata.IndexTemplateMetadata;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Setting;
@@ -183,6 +184,7 @@ public Collection createComponents(PluginServices services) {
     @Override
     public List getRestHandlers(
         Settings unused,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java
index b105cde3d5c2a..c07d2a480b006 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java
@@ -16,6 +16,7 @@
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Setting;
@@ -117,6 +118,7 @@ public void updateTemplatesEnabled(boolean newValue) {
     @Override
     public List getRestHandlers(
         final Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         final RestController restController,
         final ClusterSettings clusterSettings,
         final IndexScopedSettings indexScopedSettings,
diff --git a/x-pack/plugin/repositories-metering-api/src/main/java/org/elasticsearch/xpack/repositories/metering/RepositoriesMeteringPlugin.java b/x-pack/plugin/repositories-metering-api/src/main/java/org/elasticsearch/xpack/repositories/metering/RepositoriesMeteringPlugin.java
index f9ba295d4dd4d..4441ec70f74aa 100644
--- a/x-pack/plugin/repositories-metering-api/src/main/java/org/elasticsearch/xpack/repositories/metering/RepositoriesMeteringPlugin.java
+++ b/x-pack/plugin/repositories-metering-api/src/main/java/org/elasticsearch/xpack/repositories/metering/RepositoriesMeteringPlugin.java
@@ -11,6 +11,7 @@
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Settings;
@@ -42,6 +43,7 @@ public final class RepositoriesMeteringPlugin extends Plugin implements ActionPl
     @Override
     public List getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java
index 1a1aa81e0eb7b..39e68a11c0d59 100644
--- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java
+++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java
@@ -13,6 +13,7 @@
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.scheduler.SchedulerEngine;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
@@ -86,6 +87,7 @@ public Rollup(Settings settings) {
     @Override
     public List getRestHandlers(
         Settings unused,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
@@ -94,7 +96,7 @@ public List getRestHandlers(
         Supplier nodesInCluster
     ) {
         return Arrays.asList(
-            new RestRollupSearchAction(),
+            new RestRollupSearchAction(namedWriteableRegistry),
             new RestPutRollupJobAction(),
             new RestStartRollupJobAction(),
             new RestStopRollupJobAction(),
diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestRollupSearchAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestRollupSearchAction.java
index 266f515d1dbb6..68c8fba19af4e 100644
--- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestRollupSearchAction.java
+++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestRollupSearchAction.java
@@ -8,6 +8,7 @@
 
 import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.client.internal.node.NodeClient;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.rest.BaseRestHandler;
 import org.elasticsearch.rest.RestRequest;
 import org.elasticsearch.rest.action.RestRefCountedChunkedToXContentListener;
@@ -25,6 +26,12 @@ public class RestRollupSearchAction extends BaseRestHandler {
 
     private static final Set RESPONSE_PARAMS = Set.of(RestSearchAction.TYPED_KEYS_PARAM, RestSearchAction.TOTAL_HITS_AS_INT_PARAM);
 
+    private final NamedWriteableRegistry namedWriteableRegistry;
+
+    public RestRollupSearchAction(NamedWriteableRegistry namedWriteableRegistry) {
+        this.namedWriteableRegistry = namedWriteableRegistry;
+    }
+
     @Override
     public List routes() {
         return List.of(
@@ -43,7 +50,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient
                 searchRequest,
                 restRequest,
                 parser,
-                client.getNamedWriteableRegistry(),
+                namedWriteableRegistry,
                 size -> searchRequest.source().size(size)
             )
         );
diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java
index 83a38a4d0b328..b08f31083c973 100644
--- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java
+++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java
@@ -29,6 +29,7 @@
 import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.Priority;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Setting;
@@ -500,6 +501,7 @@ public Optional getEngineFactory(IndexSettings indexSettings) {
 
     public List getRestHandlers(
         Settings unused,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/main/java/org/elasticsearch/xpack/security/operator/OperatorPrivilegesTestPlugin.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/main/java/org/elasticsearch/xpack/security/operator/OperatorPrivilegesTestPlugin.java
index 7842493abbc7a..87ef55b5b8633 100644
--- a/x-pack/plugin/security/qa/operator-privileges-tests/src/main/java/org/elasticsearch/xpack/security/operator/OperatorPrivilegesTestPlugin.java
+++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/main/java/org/elasticsearch/xpack/security/operator/OperatorPrivilegesTestPlugin.java
@@ -9,6 +9,7 @@
 
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Settings;
@@ -27,6 +28,7 @@ public class OperatorPrivilegesTestPlugin extends Plugin implements ActionPlugin
     @Override
     public List getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
index cc6c659e1478b..c6b441d9cc04f 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
@@ -1387,6 +1387,7 @@ public List getActionFilters() {
     @Override
     public List getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java
index 1735b9443c78f..6cd12858a12c1 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java
@@ -806,6 +806,7 @@ public void testSecurityRestHandlerInterceptorCanBeInstalled() throws IllegalAcc
             ActionModule actionModule = new ActionModule(
                 settingsModule.getSettings(),
                 TestIndexNameExpressionResolver.newInstance(threadPool.getThreadContext()),
+                null,
                 settingsModule.getIndexScopedSettings(),
                 settingsModule.getClusterSettings(),
                 settingsModule.getSettingsFilter(),
diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/ShutdownPlugin.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/ShutdownPlugin.java
index 8c85bdb11dfa2..234a77154a641 100644
--- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/ShutdownPlugin.java
+++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/ShutdownPlugin.java
@@ -11,6 +11,7 @@
 import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Settings;
@@ -55,6 +56,7 @@ public Collection createComponents(PluginServices services) {
     @Override
     public List getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycle.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycle.java
index 0233db5af081f..946d9c081658a 100644
--- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycle.java
+++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycle.java
@@ -16,6 +16,7 @@
 import org.elasticsearch.cluster.metadata.Metadata;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Setting;
@@ -173,6 +174,7 @@ private static List xContentEntries() {
     @Override
     public List getRestHandlers(
         Settings unused,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/SnapshotRepositoryTestKit.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/SnapshotRepositoryTestKit.java
index 96a4d05d2fb4b..fd8970f327ce9 100644
--- a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/SnapshotRepositoryTestKit.java
+++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/SnapshotRepositoryTestKit.java
@@ -11,6 +11,7 @@
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Settings;
@@ -36,6 +37,7 @@ public class SnapshotRepositoryTestKit extends Plugin implements ActionPlugin {
     @Override
     public List getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlPlugin.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlPlugin.java
index c6e0b5067ee08..52a62f4b21d76 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlPlugin.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlPlugin.java
@@ -109,6 +109,7 @@ Collection createComponents(
     @Override
     public List getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlPluginTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlPluginTests.java
index 4485c883ca30f..6513d72eaf1f8 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlPluginTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlPluginTests.java
@@ -49,6 +49,7 @@ public void testSqlDisabledIsNoOp() {
         assertThat(
             plugin.getRestHandlers(
                 Settings.EMPTY,
+                mock(NamedWriteableRegistry.class),
                 mock(RestController.class),
                 new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS),
                 IndexScopedSettings.DEFAULT_SCOPED_SETTINGS,
diff --git a/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/TextStructurePlugin.java b/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/TextStructurePlugin.java
index 3ba82e6c4513b..c81aa45581b89 100644
--- a/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/TextStructurePlugin.java
+++ b/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/TextStructurePlugin.java
@@ -11,6 +11,7 @@
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Settings;
@@ -38,6 +39,7 @@ public class TextStructurePlugin extends Plugin implements ActionPlugin {
     @Override
     public List<RestHandler> getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java
index 98c95c5a9803a..15ebc4e852f31 100644
--- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java
+++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java
@@ -25,6 +25,7 @@
 import org.elasticsearch.cluster.metadata.Metadata;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Setting;
@@ -166,6 +167,7 @@ protected XPackLicenseState getLicenseState() {
     @Override
     public List<RestHandler> getRestHandlers(
         final Settings unused,
+        NamedWriteableRegistry namedWriteableRegistry,
         final RestController restController,
         final ClusterSettings clusterSettings,
         final IndexScopedSettings indexScopedSettings,
diff --git a/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/VectorTilePlugin.java b/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/VectorTilePlugin.java
index f902c94e5aa9b..7f6e645b15015 100644
--- a/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/VectorTilePlugin.java
+++ b/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/VectorTilePlugin.java
@@ -8,6 +8,7 @@
 
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Settings;
@@ -33,6 +34,7 @@ protected XPackLicenseState getLicenseState() {
     @Override
     public List<RestHandler> getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java
index fc1d200c91b82..f107bac568902 100644
--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java
+++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java
@@ -26,6 +26,7 @@
 import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Setting;
@@ -690,6 +691,7 @@ static int getWatcherThreadPoolSize(final boolean isDataNode, final int allocate
     @Override
     public List<RestHandler> getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java
index d3af489a77a2a..1d93e999a4407 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java
@@ -44,7 +44,7 @@ public void testWatcherDisabledTests() throws Exception {
         List<ExecutorBuilder<?>> executorBuilders = watcher.getExecutorBuilders(settings);
         assertThat(executorBuilders, hasSize(0));
         assertThat(watcher.getActions(), hasSize(2));
-        assertThat(watcher.getRestHandlers(settings, null, null, null, null, null, null), hasSize(0));
+        assertThat(watcher.getRestHandlers(settings, null, null, null, null, null, null, null), hasSize(0));
 
         // ensure index module is not called, even if watches index is tried
         IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(Watch.INDEX, settings);
diff --git a/x-pack/qa/freeze-plugin/src/main/java/org/elasticsearch/plugin/freeze/FreezeIndexPlugin.java b/x-pack/qa/freeze-plugin/src/main/java/org/elasticsearch/plugin/freeze/FreezeIndexPlugin.java
index 4fcf096787781..15336286cc2fc 100644
--- a/x-pack/qa/freeze-plugin/src/main/java/org/elasticsearch/plugin/freeze/FreezeIndexPlugin.java
+++ b/x-pack/qa/freeze-plugin/src/main/java/org/elasticsearch/plugin/freeze/FreezeIndexPlugin.java
@@ -13,6 +13,7 @@
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Settings;
@@ -42,6 +43,7 @@ public class FreezeIndexPlugin extends Plugin implements ActionPlugin {
     @Override
     public List<RestHandler> getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,

From ff259c66cf6ccf7b52aa76cb7af5e4646f1ed819 Mon Sep 17 00:00:00 2001
From: David Turner 
Date: Thu, 11 Jan 2024 12:47:00 +0000
Subject: [PATCH 40/75] Encapsulate recoverLocallyUpToGlobalCheckpoint in tests
 (#104258)

We call `IndexShard#recoverLocallyUpToGlobalCheckpoint` synchronously in
various tests. This commit encapsulates all these calls in a single
utility method so that we can later adapt it into an async API without a
noisy change to every caller.
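
As a rough illustration of that follow-up (not part of this change): if the
production method ever takes a listener, only the test helper needs to adapt,
for example by blocking on a future. The listener-based signature below is an
assumption, not the current API.

    // hypothetical sketch: assumes an async recoverLocallyUpToGlobalCheckpoint(ActionListener<Long>)
    public static long recoverLocallyUpToGlobalCheckpoint(IndexShard indexShard) {
        final PlainActionFuture<Long> future = new PlainActionFuture<>();
        indexShard.recoverLocallyUpToGlobalCheckpoint(future); // assumed async variant
        return future.actionGet(); // tests keep their blocking call sites
    }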
---
 .../indices/recovery/RecoveryState.java            |  2 +-
 .../recovery/PeerRecoveryTargetServiceTests.java   | 14 +++++++-------
 .../index/shard/IndexShardTestCase.java            |  6 +++++-
 3 files changed, 13 insertions(+), 9 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryState.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryState.java
index acc33f5d85ea4..b1590a282fc8d 100644
--- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryState.java
+++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryState.java
@@ -580,7 +580,7 @@ public synchronized void totalOperationsOnStart(int total) {
 
         /**
          * Sets the total number of translog operations to be recovered locally before performing peer recovery
-         * @see IndexShard#recoverLocallyUpToGlobalCheckpoint()
+         * @see IndexShard#recoverLocallyUpToGlobalCheckpoint
          */
         public synchronized void totalLocal(int totalLocal) {
             assert totalLocal >= recovered : totalLocal + " < " + recovered;
diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetServiceTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetServiceTests.java
index 0317a6baf040a..d27e924110c15 100644
--- a/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetServiceTests.java
@@ -211,7 +211,7 @@ public void testPrepareIndexForPeerRecovery() throws Exception {
         IndexShard shard = newShard(false);
         shard.markAsRecovering("for testing", new RecoveryState(shard.routingEntry(), localNode, localNode));
         shard.prepareForIndexRecovery();
-        assertThat(shard.recoverLocallyUpToGlobalCheckpoint(), equalTo(UNASSIGNED_SEQ_NO));
+        assertThat(recoverLocallyUpToGlobalCheckpoint(shard), equalTo(UNASSIGNED_SEQ_NO));
         assertThat(shard.recoveryState().getTranslog().totalLocal(), equalTo(RecoveryState.Translog.UNKNOWN));
         assertThat(shard.recoveryState().getTranslog().recoveredOperations(), equalTo(0));
         assertThat(shard.getLastKnownGlobalCheckpoint(), equalTo(UNASSIGNED_SEQ_NO));
@@ -239,7 +239,7 @@ public void testPrepareIndexForPeerRecovery() throws Exception {
         );
         replica.markAsRecovering("for testing", new RecoveryState(replica.routingEntry(), localNode, localNode));
         replica.prepareForIndexRecovery();
-        assertThat(replica.recoverLocallyUpToGlobalCheckpoint(), equalTo(globalCheckpoint + 1));
+        assertThat(recoverLocallyUpToGlobalCheckpoint(replica), equalTo(globalCheckpoint + 1));
         assertThat(replica.recoveryState().getTranslog().totalLocal(), equalTo(expectedTotalLocal));
         assertThat(replica.recoveryState().getTranslog().recoveredOperations(), equalTo(expectedTotalLocal));
         assertThat(replica.getLastKnownGlobalCheckpoint(), equalTo(UNASSIGNED_SEQ_NO));
@@ -254,7 +254,7 @@ public void testPrepareIndexForPeerRecovery() throws Exception {
         replica = reinitShard(shard, ShardRoutingHelper.initWithSameId(shard.routingEntry(), RecoverySource.PeerRecoverySource.INSTANCE));
         replica.markAsRecovering("for testing", new RecoveryState(replica.routingEntry(), localNode, localNode));
         replica.prepareForIndexRecovery();
-        assertThat(replica.recoverLocallyUpToGlobalCheckpoint(), equalTo(UNASSIGNED_SEQ_NO));
+        assertThat(recoverLocallyUpToGlobalCheckpoint(replica), equalTo(UNASSIGNED_SEQ_NO));
         assertThat(replica.recoveryState().getTranslog().totalLocal(), equalTo(RecoveryState.Translog.UNKNOWN));
         assertThat(replica.recoveryState().getTranslog().recoveredOperations(), equalTo(0));
         assertThat(replica.getLastKnownGlobalCheckpoint(), equalTo(UNASSIGNED_SEQ_NO));
@@ -276,10 +276,10 @@ public void testPrepareIndexForPeerRecovery() throws Exception {
         replica.markAsRecovering("for testing", new RecoveryState(replica.routingEntry(), localNode, localNode));
         replica.prepareForIndexRecovery();
         if (safeCommit.isPresent()) {
-            assertThat(replica.recoverLocallyUpToGlobalCheckpoint(), equalTo(safeCommit.get().localCheckpoint + 1));
+            assertThat(recoverLocallyUpToGlobalCheckpoint(replica), equalTo(safeCommit.get().localCheckpoint + 1));
             assertThat(replica.recoveryState().getTranslog().totalLocal(), equalTo(0));
         } else {
-            assertThat(replica.recoverLocallyUpToGlobalCheckpoint(), equalTo(UNASSIGNED_SEQ_NO));
+            assertThat(recoverLocallyUpToGlobalCheckpoint(replica), equalTo(UNASSIGNED_SEQ_NO));
             assertThat(replica.recoveryState().getTranslog().totalLocal(), equalTo(RecoveryState.Translog.UNKNOWN));
         }
         assertThat(replica.recoveryState().getStage(), equalTo(RecoveryState.Stage.TRANSLOG));
@@ -313,7 +313,7 @@ public void testClosedIndexSkipsLocalRecovery() throws Exception {
         );
         replica.markAsRecovering("for testing", new RecoveryState(replica.routingEntry(), localNode, localNode));
         replica.prepareForIndexRecovery();
-        assertThat(replica.recoverLocallyUpToGlobalCheckpoint(), equalTo(safeCommit.get().localCheckpoint + 1));
+        assertThat(recoverLocallyUpToGlobalCheckpoint(replica), equalTo(safeCommit.get().localCheckpoint + 1));
         assertThat(replica.recoveryState().getTranslog().totalLocal(), equalTo(0));
         assertThat(replica.recoveryState().getTranslog().recoveredOperations(), equalTo(0));
         assertThat(replica.getLastKnownGlobalCheckpoint(), equalTo(UNASSIGNED_SEQ_NO));
@@ -328,7 +328,7 @@ public void testResetStartingSeqNoIfLastCommitCorrupted() throws Exception {
         shard = reinitShard(shard, ShardRoutingHelper.initWithSameId(shard.routingEntry(), RecoverySource.PeerRecoverySource.INSTANCE));
         shard.markAsRecovering("peer recovery", new RecoveryState(shard.routingEntry(), pNode, rNode));
         shard.prepareForIndexRecovery();
-        long startingSeqNo = shard.recoverLocallyUpToGlobalCheckpoint();
+        long startingSeqNo = recoverLocallyUpToGlobalCheckpoint(shard);
         shard.store().markStoreCorrupted(new IOException("simulated"));
         RecoveryTarget recoveryTarget = new RecoveryTarget(shard, null, 0L, null, null, null);
         StartRecoveryRequest request = PeerRecoveryTargetService.getStartRecoveryRequest(logger, rNode, recoveryTarget, startingSeqNo);
diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java
index fb222f67eef69..1d0a476369be3 100644
--- a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java
@@ -811,7 +811,7 @@ protected final void recoverUnstartedReplica(
         }
         replica.prepareForIndexRecovery();
         final RecoveryTarget recoveryTarget = targetSupplier.apply(replica, pNode);
-        final long startingSeqNo = recoveryTarget.indexShard().recoverLocallyUpToGlobalCheckpoint();
+        final long startingSeqNo = recoverLocallyUpToGlobalCheckpoint(recoveryTarget.indexShard());
         final StartRecoveryRequest request = PeerRecoveryTargetService.getStartRecoveryRequest(
             logger,
             rNode,
@@ -1158,4 +1158,8 @@ public static Engine.Warmer createTestWarmer(IndexSettings indexSettings) {
             }
         };
     }
+
+    public static long recoverLocallyUpToGlobalCheckpoint(IndexShard indexShard) {
+        return indexShard.recoverLocallyUpToGlobalCheckpoint();
+    }
 }

From a8d0e9c71dc63bd2f2c18780f6e4fb1b70f42d00 Mon Sep 17 00:00:00 2001
From: Simon Cooper 
Date: Thu, 11 Jan 2024 12:56:14 +0000
Subject: [PATCH 41/75] Update after merge conflict on #103277

---
 .../elasticsearch/test/esql/heap_attack/HeapAttackPlugin.java   | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/test/external-modules/esql-heap-attack/src/main/java/org/elasticsearch/test/esql/heap_attack/HeapAttackPlugin.java b/test/external-modules/esql-heap-attack/src/main/java/org/elasticsearch/test/esql/heap_attack/HeapAttackPlugin.java
index a1bf71070f3c8..77e0c3b3e0821 100644
--- a/test/external-modules/esql-heap-attack/src/main/java/org/elasticsearch/test/esql/heap_attack/HeapAttackPlugin.java
+++ b/test/external-modules/esql-heap-attack/src/main/java/org/elasticsearch/test/esql/heap_attack/HeapAttackPlugin.java
@@ -17,6 +17,7 @@
 
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Settings;
@@ -33,6 +34,7 @@ public class HeapAttackPlugin extends Plugin implements ActionPlugin {
     @Override
     public List<RestHandler> getRestHandlers(
         Settings settings,
+        NamedWriteableRegistry namedWriteableRegistry,
         RestController restController,
         ClusterSettings clusterSettings,
         IndexScopedSettings indexScopedSettings,

From a20d01950712269668c618b98827a2f7d3b39657 Mon Sep 17 00:00:00 2001
From: David Turner 
Date: Thu, 11 Jan 2024 13:04:40 +0000
Subject: [PATCH 42/75] Clean up start-recovery sequence (#104254)

It's a little hard to see that `IndexShard#preRecovery` happens before
the rest of the pre-recovery bootstrapping steps, and it's quite a noisy
change to make `IndexShard#recoverLocallyUpToGlobalCheckpoint` async
because of how this is written today. This commit refactors things to
show the steps in order and set up for an asyncification follow-up.
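
For orientation, the resulting chain has roughly this shape (the step names
here are placeholders, not the real methods):

    SubscribableListener
        // fork the first asynchronous step
        .newForked(l -> firstAsyncStep(l))
        // then derive the start-recovery request synchronously from its result
        .andThenApply(ignored -> buildStartRequest())
        // finally hand the result to the outer listener
        .addListener(outerListener);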
---
 .../recovery/PeerRecoveryTargetService.java   | 45 +++++++++++--------
 1 file changed, 26 insertions(+), 19 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java
index 61545ada107b6..487632546db21 100644
--- a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java
+++ b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java
@@ -19,6 +19,7 @@
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.ActionRunnable;
 import org.elasticsearch.action.support.ChannelActionListener;
+import org.elasticsearch.action.support.SubscribableListener;
 import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.ClusterStateObserver;
@@ -375,26 +376,32 @@ record StartRecoveryRequestToSend(StartRecoveryRequest startRecoveryRequest, Str
         });
 
         if (preExistingRequest == null) {
-            ActionListener.run(toSendListener.map(v -> {
-                logger.trace("{} preparing shard for peer recovery", recoveryTarget.shardId());
-                indexShard.prepareForIndexRecovery();
-                if (indexShard.indexSettings().getIndexMetadata().isSearchableSnapshot()) {
-                    // for searchable snapshots, peer recovery is treated similarly to recovery from snapshot
-                    indexShard.getIndexEventListener().afterFilesRestoredFromRepository(indexShard);
-                    final Store store = indexShard.store();
-                    store.incRef();
-                    try {
-                        StoreRecovery.bootstrap(indexShard, store);
-                    } finally {
-                        store.decRef();
+            SubscribableListener
+                // run pre-recovery activities
+                .newForked(indexShard::preRecovery)
+                // recover the shard locally and construct the start-recovery request
+                .andThenApply(v -> {
+                    logger.trace("{} preparing shard for peer recovery", recoveryTarget.shardId());
+                    indexShard.prepareForIndexRecovery();
+                    if (indexShard.indexSettings().getIndexMetadata().isSearchableSnapshot()) {
+                        // for searchable snapshots, peer recovery is treated similarly to recovery from snapshot
+                        indexShard.getIndexEventListener().afterFilesRestoredFromRepository(indexShard);
+                        final Store store = indexShard.store();
+                        store.incRef();
+                        try {
+                            StoreRecovery.bootstrap(indexShard, store);
+                        } finally {
+                            store.decRef();
+                        }
                     }
-                }
-                final long startingSeqNo = indexShard.recoverLocallyUpToGlobalCheckpoint();
-                assert startingSeqNo == UNASSIGNED_SEQ_NO || recoveryTarget.state().getStage() == RecoveryState.Stage.TRANSLOG
-                    : "unexpected recovery stage [" + recoveryTarget.state().getStage() + "] starting seqno [ " + startingSeqNo + "]";
-                final var startRequest = getStartRecoveryRequest(logger, clusterService.localNode(), recoveryTarget, startingSeqNo);
-                return new StartRecoveryRequestToSend(startRequest, PeerRecoverySourceService.Actions.START_RECOVERY, startRequest);
-            }), indexShard::preRecovery);
+                    final long startingSeqNo = indexShard.recoverLocallyUpToGlobalCheckpoint();
+                    assert startingSeqNo == UNASSIGNED_SEQ_NO || recoveryTarget.state().getStage() == RecoveryState.Stage.TRANSLOG
+                        : "unexpected recovery stage [" + recoveryTarget.state().getStage() + "] starting seqno [ " + startingSeqNo + "]";
+                    final var startRequest = getStartRecoveryRequest(logger, clusterService.localNode(), recoveryTarget, startingSeqNo);
+                    return new StartRecoveryRequestToSend(startRequest, PeerRecoverySourceService.Actions.START_RECOVERY, startRequest);
+                })
+                // finally send the start-recovery request
+                .addListener(toSendListener);
         } else {
             toSendListener.onResponse(
                 new StartRecoveryRequestToSend(

From f64147f6c1fe7ee49c0635810ecffee4b2857460 Mon Sep 17 00:00:00 2001
From: Ignacio Vera 
Date: Thu, 11 Jan 2024 14:25:48 +0100
Subject: [PATCH 43/75] ESQL: Remove the possibility of converting a
 CartesianPoint or GeoPoint literal to a long (#104197)

---
 .../esql/functions/types/to_long.asciidoc     |   2 -
 .../src/main/resources/show.csv-spec          |   4 +-
 .../ToLongFromCartesianPointEvaluator.java    | 111 ------------------
 .../convert/ToLongFromGeoPointEvaluator.java  | 111 ------------------
 .../function/scalar/convert/ToLong.java       |  21 +---
 .../function/scalar/convert/ToLongTests.java  |  12 --
 6 files changed, 3 insertions(+), 258 deletions(-)
 delete mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromCartesianPointEvaluator.java
 delete mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromGeoPointEvaluator.java

diff --git a/docs/reference/esql/functions/types/to_long.asciidoc b/docs/reference/esql/functions/types/to_long.asciidoc
index 5c063739fc5b1..307f573f1db2d 100644
--- a/docs/reference/esql/functions/types/to_long.asciidoc
+++ b/docs/reference/esql/functions/types/to_long.asciidoc
@@ -2,10 +2,8 @@
 |===
 v | result
 boolean | long
-cartesian_point | long
 datetime | long
 double | long
-geo_point | long
 integer | long
 keyword | long
 long | long
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec
index 5e78ee76c046c..891b40cc95e50 100644
--- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec
+++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec
@@ -84,7 +84,7 @@ to_geopoint              |"geo_point to_geopoint(v:geo_point|keyword|text)"
 to_int                   |"integer to_int(v:boolean|date|keyword|text|double|long|unsigned_long|integer)"                  |v   |"boolean|date|keyword|text|double|long|unsigned_long|integer"                                              |                                                    |integer                          | "Converts an input value to an integer value."                                                                                      |false                       |false           | false
 to_integer               |"integer to_integer(v:boolean|date|keyword|text|double|long|unsigned_long|integer)"              |v   |"boolean|date|keyword|text|double|long|unsigned_long|integer"                                              |                                                    |integer                          | "Converts an input value to an integer value."                                                                                      |false                       |false           | false
 to_ip                    |"ip to_ip(v:ip|keyword|text)"                                                                    |v   |"ip|keyword|text"                                                                                          |                                                    |ip                               | "Converts an input string to an IP value."                                                                                      |false                       |false           | false
-to_long                  |"long to_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer|geo_point|cartesian_point)"                               |v                         |"boolean|date|keyword|text|double|long|unsigned_long|integer|geo_point|cartesian_point"                     |                                                    |long                             | "Converts an input value to a long value."                                                                                      |false                       |false | false
+to_long                  |"long to_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)"                    |v   |"boolean|date|keyword|text|double|long|unsigned_long|integer"                                              |                                                    |long                             | "Converts an input value to a long value."                                                                                      |false                       |false | false
 to_radians               |"double to_radians(v:double|integer|long|unsigned_long)"                                         |v   |"double|integer|long|unsigned_long"                                                                        |                                                    |double                           | "Converts a number in degrees to radians."                                                                                      |false                       |false | false
 to_str                   |"keyword to_str(v:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)"                  |v                         |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"          |                                                    |keyword                          | "Converts a field into a string."                                                                                      |false                       |false | false
 to_string                |"keyword to_string(v:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)"               |v                         |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"          |                                                    |keyword                          | "Converts a field into a string."                                                                                      |false                       |false | false
@@ -175,7 +175,7 @@ double tau()
 "integer to_int(v:boolean|date|keyword|text|double|long|unsigned_long|integer)"
 "integer to_integer(v:boolean|date|keyword|text|double|long|unsigned_long|integer)"
 "ip to_ip(v:ip|keyword|text)"
-"long to_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer|geo_point|cartesian_point)"
+"long to_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)"
 "double to_radians(v:double|integer|long|unsigned_long)"
 "keyword to_str(v:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)"
 "keyword to_string(v:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)"
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromCartesianPointEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromCartesianPointEvaluator.java
deleted file mode 100644
index 5f424bc4e568b..0000000000000
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromCartesianPointEvaluator.java
+++ /dev/null
@@ -1,111 +0,0 @@
-// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
-// or more contributor license agreements. Licensed under the Elastic License
-// 2.0; you may not use this file except in compliance with the Elastic License
-// 2.0.
-package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
-
-import java.lang.Override;
-import java.lang.String;
-import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.compute.data.Block;
-import org.elasticsearch.compute.data.BytesRefBlock;
-import org.elasticsearch.compute.data.BytesRefVector;
-import org.elasticsearch.compute.data.LongBlock;
-import org.elasticsearch.compute.data.Vector;
-import org.elasticsearch.compute.operator.DriverContext;
-import org.elasticsearch.compute.operator.EvalOperator;
-import org.elasticsearch.xpack.ql.tree.Source;
-
-/**
- * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToLong}.
- * This class is generated. Do not edit it.
- */
-public final class ToLongFromCartesianPointEvaluator extends AbstractConvertFunction.AbstractEvaluator {
-  public ToLongFromCartesianPointEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
-      DriverContext driverContext) {
-    super(driverContext, field, source);
-  }
-
-  @Override
-  public String name() {
-    return "ToLongFromCartesianPoint";
-  }
-
-  @Override
-  public Block evalVector(Vector v) {
-    BytesRefVector vector = (BytesRefVector) v;
-    int positionCount = v.getPositionCount();
-    BytesRef scratchPad = new BytesRef();
-    if (vector.isConstant()) {
-      return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0, scratchPad), positionCount);
-    }
-    try (LongBlock.Builder builder = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
-      for (int p = 0; p < positionCount; p++) {
-        builder.appendLong(evalValue(vector, p, scratchPad));
-      }
-      return builder.build();
-    }
-  }
-
-  private static long evalValue(BytesRefVector container, int index, BytesRef scratchPad) {
-    BytesRef value = container.getBytesRef(index, scratchPad);
-    return ToLong.fromCartesianPoint(value);
-  }
-
-  @Override
-  public Block evalBlock(Block b) {
-    BytesRefBlock block = (BytesRefBlock) b;
-    int positionCount = block.getPositionCount();
-    try (LongBlock.Builder builder = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
-      BytesRef scratchPad = new BytesRef();
-      for (int p = 0; p < positionCount; p++) {
-        int valueCount = block.getValueCount(p);
-        int start = block.getFirstValueIndex(p);
-        int end = start + valueCount;
-        boolean positionOpened = false;
-        boolean valuesAppended = false;
-        for (int i = start; i < end; i++) {
-          long value = evalValue(block, i, scratchPad);
-          if (positionOpened == false && valueCount > 1) {
-            builder.beginPositionEntry();
-            positionOpened = true;
-          }
-          builder.appendLong(value);
-          valuesAppended = true;
-        }
-        if (valuesAppended == false) {
-          builder.appendNull();
-        } else if (positionOpened) {
-          builder.endPositionEntry();
-        }
-      }
-      return builder.build();
-    }
-  }
-
-  private static long evalValue(BytesRefBlock container, int index, BytesRef scratchPad) {
-    BytesRef value = container.getBytesRef(index, scratchPad);
-    return ToLong.fromCartesianPoint(value);
-  }
-
-  public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
-    private final Source source;
-
-    private final EvalOperator.ExpressionEvaluator.Factory field;
-
-    public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) {
-      this.field = field;
-      this.source = source;
-    }
-
-    @Override
-    public ToLongFromCartesianPointEvaluator get(DriverContext context) {
-      return new ToLongFromCartesianPointEvaluator(field.get(context), source, context);
-    }
-
-    @Override
-    public String toString() {
-      return "ToLongFromCartesianPointEvaluator[field=" + field + "]";
-    }
-  }
-}
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromGeoPointEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromGeoPointEvaluator.java
deleted file mode 100644
index e85f2191023fe..0000000000000
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromGeoPointEvaluator.java
+++ /dev/null
@@ -1,111 +0,0 @@
-// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
-// or more contributor license agreements. Licensed under the Elastic License
-// 2.0; you may not use this file except in compliance with the Elastic License
-// 2.0.
-package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
-
-import java.lang.Override;
-import java.lang.String;
-import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.compute.data.Block;
-import org.elasticsearch.compute.data.BytesRefBlock;
-import org.elasticsearch.compute.data.BytesRefVector;
-import org.elasticsearch.compute.data.LongBlock;
-import org.elasticsearch.compute.data.Vector;
-import org.elasticsearch.compute.operator.DriverContext;
-import org.elasticsearch.compute.operator.EvalOperator;
-import org.elasticsearch.xpack.ql.tree.Source;
-
-/**
- * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToLong}.
- * This class is generated. Do not edit it.
- */
-public final class ToLongFromGeoPointEvaluator extends AbstractConvertFunction.AbstractEvaluator {
-  public ToLongFromGeoPointEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
-      DriverContext driverContext) {
-    super(driverContext, field, source);
-  }
-
-  @Override
-  public String name() {
-    return "ToLongFromGeoPoint";
-  }
-
-  @Override
-  public Block evalVector(Vector v) {
-    BytesRefVector vector = (BytesRefVector) v;
-    int positionCount = v.getPositionCount();
-    BytesRef scratchPad = new BytesRef();
-    if (vector.isConstant()) {
-      return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0, scratchPad), positionCount);
-    }
-    try (LongBlock.Builder builder = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
-      for (int p = 0; p < positionCount; p++) {
-        builder.appendLong(evalValue(vector, p, scratchPad));
-      }
-      return builder.build();
-    }
-  }
-
-  private static long evalValue(BytesRefVector container, int index, BytesRef scratchPad) {
-    BytesRef value = container.getBytesRef(index, scratchPad);
-    return ToLong.fromGeoPoint(value);
-  }
-
-  @Override
-  public Block evalBlock(Block b) {
-    BytesRefBlock block = (BytesRefBlock) b;
-    int positionCount = block.getPositionCount();
-    try (LongBlock.Builder builder = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
-      BytesRef scratchPad = new BytesRef();
-      for (int p = 0; p < positionCount; p++) {
-        int valueCount = block.getValueCount(p);
-        int start = block.getFirstValueIndex(p);
-        int end = start + valueCount;
-        boolean positionOpened = false;
-        boolean valuesAppended = false;
-        for (int i = start; i < end; i++) {
-          long value = evalValue(block, i, scratchPad);
-          if (positionOpened == false && valueCount > 1) {
-            builder.beginPositionEntry();
-            positionOpened = true;
-          }
-          builder.appendLong(value);
-          valuesAppended = true;
-        }
-        if (valuesAppended == false) {
-          builder.appendNull();
-        } else if (positionOpened) {
-          builder.endPositionEntry();
-        }
-      }
-      return builder.build();
-    }
-  }
-
-  private static long evalValue(BytesRefBlock container, int index, BytesRef scratchPad) {
-    BytesRef value = container.getBytesRef(index, scratchPad);
-    return ToLong.fromGeoPoint(value);
-  }
-
-  public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
-    private final Source source;
-
-    private final EvalOperator.ExpressionEvaluator.Factory field;
-
-    public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) {
-      this.field = field;
-      this.source = source;
-    }
-
-    @Override
-    public ToLongFromGeoPointEvaluator get(DriverContext context) {
-      return new ToLongFromGeoPointEvaluator(field.get(context), source, context);
-    }
-
-    @Override
-    public String toString() {
-      return "ToLongFromGeoPointEvaluator[field=" + field + "]";
-    }
-  }
-}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java
index e34d226f01292..ee7658b07b7f9 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java
@@ -20,8 +20,6 @@
 import java.util.List;
 import java.util.Map;
 
-import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT;
-import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT;
 import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeDoubleToLong;
 import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToLong;
 import static org.elasticsearch.xpack.ql.type.DataTypes.BOOLEAN;
@@ -33,16 +31,12 @@
 import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT;
 import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG;
 import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber;
-import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN;
-import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO;
 
 public class ToLong extends AbstractConvertFunction {
 
     private static final Map<DataType, BuildFactory> EVALUATORS = Map.ofEntries(
         Map.entry(LONG, (fieldEval, source) -> fieldEval),
         Map.entry(DATETIME, (fieldEval, source) -> fieldEval),
-        Map.entry(GEO_POINT, ToLongFromGeoPointEvaluator.Factory::new),
-        Map.entry(CARTESIAN_POINT, ToLongFromCartesianPointEvaluator.Factory::new),
         Map.entry(BOOLEAN, ToLongFromBooleanEvaluator.Factory::new),
         Map.entry(KEYWORD, ToLongFromStringEvaluator.Factory::new),
         Map.entry(TEXT, ToLongFromStringEvaluator.Factory::new),
@@ -54,10 +48,7 @@ public class ToLong extends AbstractConvertFunction {
     @FunctionInfo(returnType = "long", description = "Converts an input value to a long value.")
     public ToLong(
         Source source,
-        @Param(
-            name = "v",
-            type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer", "geo_point", "cartesian_point" }
-        ) Expression field
+        @Param(name = "v", type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field
     ) {
         super(source, field);
     }
@@ -115,14 +106,4 @@ static long fromUnsignedLong(long ul) {
     static long fromInt(int i) {
         return i;
     }
-
-    @ConvertEvaluator(extraName = "FromGeoPoint")
-    static long fromGeoPoint(BytesRef wkb) {
-        return GEO.wkbAsLong(wkb);
-    }
-
-    @ConvertEvaluator(extraName = "FromCartesianPoint")
-    static long fromCartesianPoint(BytesRef wkb) {
-        return CARTESIAN.wkbAsLong(wkb);
-    }
 }
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java
index 1c2488c8e9cb5..030c219b75e2f 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java
@@ -24,9 +24,6 @@
 import java.util.function.Function;
 import java.util.function.Supplier;
 
-import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN;
-import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO;
-
 public class ToLongTests extends AbstractFunctionTestCase {
     public ToLongTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) {
         this.testCase = testCaseSupplier.get();
@@ -43,15 +40,6 @@ public static Iterable parameters() {
 
         TestCaseSupplier.forUnaryBoolean(suppliers, evaluatorName.apply("Boolean"), DataTypes.LONG, b -> b ? 1L : 0L, List.of());
 
-        // geo types
-        TestCaseSupplier.forUnaryGeoPoint(suppliers, evaluatorName.apply("GeoPoint"), DataTypes.LONG, GEO::wkbAsLong, List.of());
-        TestCaseSupplier.forUnaryCartesianPoint(
-            suppliers,
-            evaluatorName.apply("CartesianPoint"),
-            DataTypes.LONG,
-            CARTESIAN::wkbAsLong,
-            List.of()
-        );
         // datetimes
         TestCaseSupplier.forUnaryDatetime(suppliers, read, DataTypes.LONG, Instant::toEpochMilli, List.of());
         // random strings that don't look like a long

From 14f87ca7938ef8fdf6427fdc84a770629e6ecb7b Mon Sep 17 00:00:00 2001
From: Marco Liberati 
Date: Thu, 11 Jan 2024 15:08:16 +0100
Subject: [PATCH 44/75] [ES|QL] Make type and description annotations required
 (#104249)

---
 .../function/AbstractFunctionTestCase.java          | 13 +++++--------
 1 file changed, 5 insertions(+), 8 deletions(-)

diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
index 34da3725056cb..e3ff92000ab21 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
@@ -596,6 +596,8 @@ public static void testFunctionInfo() {
         EsqlFunctionRegistry.FunctionDescription description = EsqlFunctionRegistry.description(definition);
         List args = description.args();
 
+        assertTrue("expect description to be defined", description.description() != null && description.description().length() > 0);
+
         List<Set<String>> typesFromSignature = new ArrayList<>();
         Set<String> returnFromSignature = new HashSet<>();
         for (int i = 0; i < args.size(); i++) {
@@ -611,21 +613,16 @@ public static void testFunctionInfo() {
 
         for (int i = 0; i < args.size(); i++) {
             Set<String> annotationTypes = Arrays.stream(args.get(i).type()).collect(Collectors.toCollection(() -> new TreeSet<>()));
-            if (annotationTypes.equals(Set.of("?"))) {
-                continue; // TODO remove this eventually, so that all the functions will have to provide signature info
-            }
             Set<String> signatureTypes = typesFromSignature.get(i);
             if (signatureTypes.isEmpty()) {
                 continue;
             }
-            assertEquals(annotationTypes, signatureTypes);
+            assertEquals(signatureTypes, annotationTypes);
         }
 
         Set<String> returnTypes = Arrays.stream(description.returnType()).collect(Collectors.toCollection(() -> new TreeSet<>()));
-        if (returnTypes.equals(Set.of("?")) == false) {
-            // TODO remove this eventually, so that all the functions will have to provide signature info
-            assertEquals(returnTypes, returnFromSignature);
-        }
+        assertEquals(returnFromSignature, returnTypes);
+
     }
 
     /**

From b7b3831fb96e42c933da85fe2ed2c4f539fe70e5 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Francisco=20Fern=C3=A1ndez=20Casta=C3=B1o?=
 
Date: Thu, 11 Jan 2024 15:10:15 +0100
Subject: [PATCH 45/75] Fix cancellation order in
 CancellableRateLimitedFluxIterator (#104259)

Closes #103054
---
 docs/changelog/104259.yaml                                  | 6 ++++++
 .../azure/CancellableRateLimitedFluxIterator.java           | 4 ++--
 2 files changed, 8 insertions(+), 2 deletions(-)
 create mode 100644 docs/changelog/104259.yaml

diff --git a/docs/changelog/104259.yaml b/docs/changelog/104259.yaml
new file mode 100644
index 0000000000000..461d6f039136a
--- /dev/null
+++ b/docs/changelog/104259.yaml
@@ -0,0 +1,6 @@
+pr: 104259
+summary: Fix cancellation order in `CancellableRateLimitedFluxIterator`
+area: Snapshot/Restore
+type: bug
+issues:
+ - 103054
diff --git a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/CancellableRateLimitedFluxIterator.java b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/CancellableRateLimitedFluxIterator.java
index 2dff8a10d39f7..ed2e0bf9be0b2 100644
--- a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/CancellableRateLimitedFluxIterator.java
+++ b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/CancellableRateLimitedFluxIterator.java
@@ -165,9 +165,9 @@ public void onNext(T element) {
     }
 
     public void cancel() {
+        done = true;
         cancelSubscription();
         clearQueue();
-        done = true;
         // cancel should be called from the consumer
         // thread, but to avoid potential deadlocks
         // we just try to release a possibly blocked
@@ -177,9 +177,9 @@ public void cancel() {
 
     @Override
     public void onError(Throwable t) {
+        done = true;
         clearQueue();
         error = t;
-        done = true;
         signalConsumer();
     }
 

From 31e89890de1d034f9f7171be0251b8b5fc831e3e Mon Sep 17 00:00:00 2001
From: Max Hniebergall <137079448+maxhniebergall@users.noreply.github.com>
Date: Thu, 11 Jan 2024 10:32:45 -0500
Subject: [PATCH 46/75] [ML] Undeploy elser when inference model deleted
 (#104230)

* Added stop to the InferenceService interface and to Elser

* New integration tests

* Undeploy the ELSER deployment when the _inference ELSER model is deleted

* Update docs/changelog/104230.yaml

* Added check for platform architecture in integration test

* Improvements from PR comments
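
As a sketch (illustrative only, see the ELSER change below for the real
wiring), a service that owns a deployment can override the new default
no-op stop() roughly like this:

    @Override
    public void stop(String modelId, ActionListener<Boolean> listener) {
        // assumes the stop request can be built from the model id alone
        client.execute(
            StopTrainedModelDeploymentAction.INSTANCE,
            new StopTrainedModelDeploymentAction.Request(modelId),
            listener.delegateFailureAndWrap((l, response) -> l.onResponse(Boolean.TRUE))
        );
    }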
---
 docs/changelog/104230.yaml                    |  5 +++
 .../inference/InferenceService.java           | 10 +++++
 .../inference/InferenceBaseRestTest.java      | 36 ++++++++++++++++++
 .../xpack/inference/InferenceCrudIT.java      | 37 +++++++++++++++++++
 .../TransportDeleteInferenceModelAction.java  | 33 ++++++++++++++++-
 .../services/elser/ElserMlNodeService.java    | 12 ++++++
 6 files changed, 131 insertions(+), 2 deletions(-)
 create mode 100644 docs/changelog/104230.yaml

diff --git a/docs/changelog/104230.yaml b/docs/changelog/104230.yaml
new file mode 100644
index 0000000000000..94184f64586f5
--- /dev/null
+++ b/docs/changelog/104230.yaml
@@ -0,0 +1,5 @@
+pr: 104230
+summary: Undeploy elser when inference model deleted
+area: Machine Learning
+type: bug
+issues: []
diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceService.java b/server/src/main/java/org/elasticsearch/inference/InferenceService.java
index b6ae21977e4bc..5eb3fc2ed6174 100644
--- a/server/src/main/java/org/elasticsearch/inference/InferenceService.java
+++ b/server/src/main/java/org/elasticsearch/inference/InferenceService.java
@@ -87,6 +87,16 @@ default void init(Client client) {}
      */
     void start(Model model, ActionListener<Boolean> listener);
 
+    /**
+     * Stop the model deployment.
+     * The default action does nothing except acknowledge the request (true).
+     * @param modelId The ID of the model to be stopped
+     * @param listener The listener
+     */
+    default void stop(String modelId, ActionListener<Boolean> listener) {
+        listener.onResponse(true);
+    }
+
     /**
      * Optionally test the new model configuration in the inference service.
      * This function should be called when the model is first created, the
diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java
index 1578e03608e82..86f7bcdf92a81 100644
--- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java
+++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java
@@ -15,6 +15,7 @@
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.inference.TaskType;
+import org.elasticsearch.plugins.Platforms;
 import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.test.cluster.local.distribution.DistributionType;
 import org.elasticsearch.test.rest.ESRestTestCase;
@@ -64,6 +65,25 @@ static String mockServiceModelConfig() {
             """;
     }
 
+    protected Map<String, Object> downloadElserBlocking() throws IOException {
+        String endpoint = "_ml/trained_models/.elser_model_2?wait_for_completion=true";
+        if ("linux-x86_64".equals(Platforms.PLATFORM_NAME)) {
+            endpoint = "_ml/trained_models/.elser_model_2_linux-x86_64?wait_for_completion=true";
+        }
+        String body = """
+            {
+                "input": {
+                "field_names": ["text_field"]
+                }
+            }
+            """;
+        var request = new Request("PUT", endpoint);
+        request.setJsonEntity(body);
+        var response = client().performRequest(request);
+        assertOkOrCreated(response);
+        return entityAsMap(response);
+    }
+
     protected Map<String, Object> putModel(String modelId, String modelConfig, TaskType taskType) throws IOException {
         String endpoint = Strings.format("_inference/%s/%s", taskType, modelId);
         var request = new Request("PUT", endpoint);
@@ -73,6 +93,14 @@ protected Map putModel(String modelId, String modelConfig, TaskT
         return entityAsMap(response);
     }
 
+    protected Map<String, Object> deleteModel(String modelId, TaskType taskType) throws IOException {
+        var endpoint = Strings.format("_inference/%s/%s", taskType, modelId);
+        var request = new Request("DELETE", endpoint);
+        var response = client().performRequest(request);
+        assertOkOrCreated(response);
+        return entityAsMap(response);
+    }
+
     protected Map<String, Object> getModels(String modelId, TaskType taskType) throws IOException {
         var endpoint = Strings.format("_inference/%s/%s", taskType, modelId);
         var request = new Request("GET", endpoint);
@@ -89,6 +117,14 @@ protected Map getAllModels() throws IOException {
         return entityAsMap(response);
     }
 
+    protected Map<String, Object> getTrainedModel(String modelId) throws IOException {
+        var endpoint = Strings.format("_ml/trained_models/%s/_stats", modelId);
+        var request = new Request("GET", endpoint);
+        var response = client().performRequest(request);
+        assertOkOrCreated(response);
+        return entityAsMap(response);
+    }
+
     protected Map<String, Object> inferOnMockService(String modelId, TaskType taskType, List<String> input) throws IOException {
         var endpoint = Strings.format("_inference/%s/%s", taskType, modelId);
         var request = new Request("POST", endpoint);
diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java
index 61278fcae6d94..62156f3e63b9e 100644
--- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java
+++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java
@@ -3,6 +3,8 @@
  * or more contributor license agreements. Licensed under the Elastic License
  * 2.0; you may not use this file except in compliance with the Elastic License
  * 2.0.
+ *
+ * this file has been contributed to by a Generative AI
  */
 
 package org.elasticsearch.xpack.inference;
@@ -16,9 +18,44 @@
 
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.hasSize;
+import static org.hamcrest.Matchers.not;
 
 public class InferenceCrudIT extends InferenceBaseRestTest {
 
+    public void testElserCrud() throws IOException {
+
+        String elserConfig = """
+            {
+              "service": "elser",
+              "service_settings": {
+                "num_allocations": 1,
+                "num_threads": 1
+              },
+              "task_settings": {}
+            }
+            """;
+
+        // ELSER not downloaded case
+        {
+            String modelId = randomAlphaOfLength(10).toLowerCase();
+            expectThrows(ResponseException.class, () -> putModel(modelId, elserConfig, TaskType.SPARSE_EMBEDDING));
+        }
+
+        downloadElserBlocking();
+
+        // Happy case
+        {
+            String modelId = randomAlphaOfLength(10).toLowerCase();
+            putModel(modelId, elserConfig, TaskType.SPARSE_EMBEDDING);
+            var models = getModels(modelId, TaskType.SPARSE_EMBEDDING);
+            assertThat(models.get("models").toString(), containsString("model_id=" + modelId));
+            deleteModel(modelId, TaskType.SPARSE_EMBEDDING);
+            expectThrows(ResponseException.class, () -> getModels(modelId, TaskType.SPARSE_EMBEDDING));
+            models = getTrainedModel("_all");
+            assertThat(models.toString(), not(containsString("deployment_id=" + modelId)));
+        }
+    }
+
     @SuppressWarnings("unchecked")
     public void testGet() throws IOException {
         for (int i = 0; i < 5; i++) {
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java
index ceb9fb92e3fab..cb728120d2f0b 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java
@@ -7,8 +7,12 @@
 
 package org.elasticsearch.xpack.inference.action;
 
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.elasticsearch.ElasticsearchStatusException;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
+import org.elasticsearch.action.support.SubscribableListener;
 import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction;
 import org.elasticsearch.cluster.ClusterState;
@@ -18,6 +22,8 @@
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.util.concurrent.EsExecutors;
+import org.elasticsearch.inference.InferenceServiceRegistry;
+import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
@@ -26,7 +32,10 @@
 
 public class TransportDeleteInferenceModelAction extends AcknowledgedTransportMasterNodeAction<DeleteInferenceModelAction.Request> {
 
+    private static final Logger logger = LogManager.getLogger(TransportPutInferenceModelAction.class);
+
     private final ModelRegistry modelRegistry;
+    private final InferenceServiceRegistry serviceRegistry;
 
     @Inject
     public TransportDeleteInferenceModelAction(
@@ -35,7 +44,8 @@ public TransportDeleteInferenceModelAction(
         ThreadPool threadPool,
         ActionFilters actionFilters,
         IndexNameExpressionResolver indexNameExpressionResolver,
-        ModelRegistry modelRegistry
+        ModelRegistry modelRegistry,
+        InferenceServiceRegistry serviceRegistry
     ) {
         super(
             DeleteInferenceModelAction.NAME,
@@ -48,6 +58,7 @@ public TransportDeleteInferenceModelAction(
             EsExecutors.DIRECT_EXECUTOR_SERVICE
         );
         this.modelRegistry = modelRegistry;
+        this.serviceRegistry = serviceRegistry;
     }
 
     @Override
@@ -57,11 +68,29 @@ protected void masterOperation(
         ClusterState state,
         ActionListener listener
     ) {
-        modelRegistry.deleteModel(request.getModelId(), listener.delegateFailureAndWrap((l, r) -> l.onResponse(AcknowledgedResponse.TRUE)));
+        SubscribableListener.newForked(modelConfigListener -> {
+            modelRegistry.getModel(request.getModelId(), modelConfigListener);
+        }).andThen((l1, unparsedModel) -> {
+            var service = serviceRegistry.getService(unparsedModel.service());
+            if (service.isPresent()) {
+                service.get().stop(request.getModelId(), l1);
+            } else {
+                l1.onFailure(new ElasticsearchStatusException("No service found for model " + request.getModelId(), RestStatus.NOT_FOUND));
+            }
+        }).andThen((l2, didStop) -> {
+            if (didStop) {
+                modelRegistry.deleteModel(request.getModelId(), l2);
+            } else {
+                l2.onFailure(
+                    new ElasticsearchStatusException("Failed to stop model " + request.getModelId(), RestStatus.INTERNAL_SERVER_ERROR)
+                );
+            }
+        }).addListener(listener.delegateFailure((l3, didDeleteModel) -> listener.onResponse(AcknowledgedResponse.of(didDeleteModel))));
     }
 
     @Override
     protected ClusterBlockException checkBlock(DeleteInferenceModelAction.Request request, ClusterState state) {
         return state.blocks().globalBlockedException(ClusterBlockLevel.WRITE);
     }
+
 }
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java
index 01fe828d723d2..61119dd13475a 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java
@@ -3,6 +3,8 @@
  * or more contributor license agreements. Licensed under the Elastic License
  * 2.0; you may not use this file except in compliance with the Elastic License
  * 2.0.
+ *
+ * this file was contributed to by a generative AI
  */
 
 package org.elasticsearch.xpack.inference.services.elser;
@@ -24,6 +26,7 @@
 import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults;
 import org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction;
 import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction;
+import org.elasticsearch.xpack.core.ml.action.StopTrainedModelDeploymentAction;
 import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextExpansionConfigUpdate;
 
 import java.io.IOException;
@@ -161,6 +164,15 @@ public void start(Model model, ActionListener listener) {
         );
     }
 
+    @Override
+    public void stop(String modelId, ActionListener listener) {
+        client.execute(
+            StopTrainedModelDeploymentAction.INSTANCE,
+            new StopTrainedModelDeploymentAction.Request(modelId),
+            listener.delegateFailureAndWrap((delegatedResponseListener, response) -> delegatedResponseListener.onResponse(Boolean.TRUE))
+        );
+    }
+
     @Override
     public void infer(Model model, List input, Map taskSettings, ActionListener listener) {
         // No task settings to override with requestTaskSettings

From 731e5cb377c2b43ff2cfa723c7bb20b74b8a76f9 Mon Sep 17 00:00:00 2001
From: Nik Everett 
Date: Thu, 11 Jan 2024 11:36:49 -0500
Subject: [PATCH 47/75] ESQL: Harden async tests (#104272)

I've seen failures in the async tests where they expect an `id` to be
returned, but the results come back too fast. This change forces the
response to include the `id` even when the results are already available,
so the test can reliably fetch by the `id` every time.

Closes #104251
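
A minimal sketch of the resulting request mutation (mirroring the one-line
diff below, with the surrounding loop shown for context and the map's
generic types assumed):

```java
// Force every async query onto the submit-then-fetch-by-id path:
// never wait inline for results, and keep them so the follow-up fetch by id succeeds.
for (Map<String, Object> body : copy.getBodies()) {
    body.put("wait_for_completion_timeout", "0ms");
    body.put("keep_on_completion", true);
}
```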
---
 .../esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java | 1 +
 1 file changed, 1 insertion(+)

diff --git a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java
index 91f9540008eaa..312175c92246a 100644
--- a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java
+++ b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java
@@ -31,6 +31,7 @@ public static Iterable parameters() throws Exception {
             ApiCallSection copy = doSection.getApiCallSection().copyWithNewApi("esql.async_query");
             for (Map body : copy.getBodies()) {
                 body.put("wait_for_completion_timeout", "0ms");
+                body.put("keep_on_completion", true);
             }
             doSection.setApiCallSection(copy);
 

From 331f92abe3a494ce2444832a2a758c2e01102fa7 Mon Sep 17 00:00:00 2001
From: Brian Seeders 
Date: Thu, 11 Jan 2024 11:39:51 -0500
Subject: [PATCH 48/75] Bump versions after 8.11.4 release

---
 .buildkite/pipelines/intake.yml                  |  2 +-
 .buildkite/pipelines/periodic-packaging.yml      | 16 ++++++++++++++++
 .buildkite/pipelines/periodic.yml                | 10 ++++++++++
 .ci/bwcVersions                                  |  1 +
 .ci/snapshotBwcVersions                          |  2 +-
 .../src/main/java/org/elasticsearch/Version.java |  1 +
 6 files changed, 30 insertions(+), 2 deletions(-)

diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml
index 3271007a00077..c80cd99067743 100644
--- a/.buildkite/pipelines/intake.yml
+++ b/.buildkite/pipelines/intake.yml
@@ -48,7 +48,7 @@ steps:
         timeout_in_minutes: 300
         matrix:
           setup:
-            BWC_VERSION: ["7.17.17", "8.11.4", "8.12.0", "8.13.0"]
+            BWC_VERSION: ["7.17.17", "8.11.5", "8.12.0", "8.13.0"]
         agents:
           provider: gcp
           image: family/elasticsearch-ubuntu-2004
diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml
index 66eb1fc79e3ca..289139bee61b0 100644
--- a/.buildkite/pipelines/periodic-packaging.yml
+++ b/.buildkite/pipelines/periodic-packaging.yml
@@ -1793,6 +1793,22 @@ steps:
         env:
           BWC_VERSION: 8.11.4
 
+      - label: "{{matrix.image}} / 8.11.5 / packaging-tests-upgrade"
+        command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.11.5
+        timeout_in_minutes: 300
+        matrix:
+          setup:
+            image:
+              - rocky-8
+              - ubuntu-2004
+        agents:
+          provider: gcp
+          image: family/elasticsearch-{{matrix.image}}
+          machineType: custom-16-32768
+          buildDirectory: /dev/shm/bk
+        env:
+          BWC_VERSION: 8.11.5
+
       - label: "{{matrix.image}} / 8.12.0 / packaging-tests-upgrade"
         command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.12.0
         timeout_in_minutes: 300
diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml
index 97d174b546e4f..6e8dc5e5265b3 100644
--- a/.buildkite/pipelines/periodic.yml
+++ b/.buildkite/pipelines/periodic.yml
@@ -1102,6 +1102,16 @@ steps:
           buildDirectory: /dev/shm/bk
         env:
           BWC_VERSION: 8.11.4
+      - label: 8.11.5 / bwc
+        command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.11.5#bwcTest
+        timeout_in_minutes: 300
+        agents:
+          provider: gcp
+          image: family/elasticsearch-ubuntu-2004
+          machineType: n1-standard-32
+          buildDirectory: /dev/shm/bk
+        env:
+          BWC_VERSION: 8.11.5
       - label: 8.12.0 / bwc
         command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.12.0#bwcTest
         timeout_in_minutes: 300
diff --git a/.ci/bwcVersions b/.ci/bwcVersions
index 569caf22ae830..f5c724dd4312c 100644
--- a/.ci/bwcVersions
+++ b/.ci/bwcVersions
@@ -109,5 +109,6 @@ BWC_VERSION:
   - "8.11.2"
   - "8.11.3"
   - "8.11.4"
+  - "8.11.5"
   - "8.12.0"
   - "8.13.0"
diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions
index 98bfd6b50d24b..9329a13bc7411 100644
--- a/.ci/snapshotBwcVersions
+++ b/.ci/snapshotBwcVersions
@@ -1,5 +1,5 @@
 BWC_VERSION:
   - "7.17.17"
-  - "8.11.4"
+  - "8.11.5"
   - "8.12.0"
   - "8.13.0"
diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java
index 4181b077cb185..126893bc36274 100644
--- a/server/src/main/java/org/elasticsearch/Version.java
+++ b/server/src/main/java/org/elasticsearch/Version.java
@@ -160,6 +160,7 @@ public class Version implements VersionId, ToXContentFragment {
     public static final Version V_8_11_2 = new Version(8_11_02_99);
     public static final Version V_8_11_3 = new Version(8_11_03_99);
     public static final Version V_8_11_4 = new Version(8_11_04_99);
+    public static final Version V_8_11_5 = new Version(8_11_05_99);
     public static final Version V_8_12_0 = new Version(8_12_00_99);
     public static final Version V_8_13_0 = new Version(8_13_00_99);
     public static final Version CURRENT = V_8_13_0;

From b72b9f299810ec0ef4e26f1beff42831025e2d95 Mon Sep 17 00:00:00 2001
From: Brian Seeders 
Date: Thu, 11 Jan 2024 11:42:36 -0500
Subject: [PATCH 49/75] Prune changelogs after 8.11.4 release

---
 docs/changelog/102843.yaml | 5 -----
 docs/changelog/102848.yaml | 5 -----
 docs/changelog/103003.yaml | 6 ------
 docs/changelog/103151.yaml | 6 ------
 docs/changelog/103474.yaml | 6 ------
 docs/changelog/103611.yaml | 6 ------
 docs/changelog/103758.yaml | 5 -----
 docs/changelog/103865.yaml | 5 -----
 8 files changed, 44 deletions(-)
 delete mode 100644 docs/changelog/102843.yaml
 delete mode 100644 docs/changelog/102848.yaml
 delete mode 100644 docs/changelog/103003.yaml
 delete mode 100644 docs/changelog/103151.yaml
 delete mode 100644 docs/changelog/103474.yaml
 delete mode 100644 docs/changelog/103611.yaml
 delete mode 100644 docs/changelog/103758.yaml
 delete mode 100644 docs/changelog/103865.yaml

diff --git a/docs/changelog/102843.yaml b/docs/changelog/102843.yaml
deleted file mode 100644
index 7e561fa7cc582..0000000000000
--- a/docs/changelog/102843.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 102843
-summary: Restore `SharedBytes.IO` refcounting on reads & writes
-area: Snapshot/Restore
-type: bug
-issues: []
diff --git a/docs/changelog/102848.yaml b/docs/changelog/102848.yaml
deleted file mode 100644
index 971d91a878579..0000000000000
--- a/docs/changelog/102848.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 102848
-summary: Decref `SharedBytes.IO` after read is done not before
-area: Snapshot/Restore
-type: bug
-issues: []
diff --git a/docs/changelog/103003.yaml b/docs/changelog/103003.yaml
deleted file mode 100644
index accacc2b62416..0000000000000
--- a/docs/changelog/103003.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 103003
-summary: "Fix: Watcher REST API `GET /_watcher/settings` now includes product header"
-area: "Watcher"
-type: bug
-issues:
- - 102928
diff --git a/docs/changelog/103151.yaml b/docs/changelog/103151.yaml
deleted file mode 100644
index bd9eea97cac6d..0000000000000
--- a/docs/changelog/103151.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 103151
-summary: Wrap painless explain error
-area: Infra/Scripting
-type: bug
-issues:
- - 103018
diff --git a/docs/changelog/103474.yaml b/docs/changelog/103474.yaml
deleted file mode 100644
index a1da15a6bfbe5..0000000000000
--- a/docs/changelog/103474.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 103474
-summary: Fix now in millis for ESQL search contexts
-area: ES|QL
-type: bug
-issues:
- - 103455
diff --git a/docs/changelog/103611.yaml b/docs/changelog/103611.yaml
deleted file mode 100644
index 51c77cd286d66..0000000000000
--- a/docs/changelog/103611.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 103611
-summary: Fix NPE on missing event queries
-area: EQL
-type: bug
-issues:
- - 103608
diff --git a/docs/changelog/103758.yaml b/docs/changelog/103758.yaml
deleted file mode 100644
index e77f228f134a0..0000000000000
--- a/docs/changelog/103758.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 103758
-summary: Fix the transport version of `PlanStreamOutput`
-area: ES|QL
-type: bug
-issues: []
diff --git a/docs/changelog/103865.yaml b/docs/changelog/103865.yaml
deleted file mode 100644
index 5c9570f32c44e..0000000000000
--- a/docs/changelog/103865.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 103865
-summary: Revert change
-area: Mapping
-type: bug
-issues: []

From f77bb0837d54ed580b3946140a95ad6765cd2144 Mon Sep 17 00:00:00 2001
From: Max Hniebergall <137079448+maxhniebergall@users.noreply.github.com>
Date: Thu, 11 Jan 2024 11:56:30 -0500
Subject: [PATCH 50/75] Updated missingTrainedModel message to include: you may
 need to create it (#104155)

* Updated missingTrainedModel message to include: you may need to create it

* Update docs/changelog/104155.yaml

* Added special exception message for ELSER

* Use ExceptionsHelper.unwrapCause instead of instanceof

Co-authored-by: David Kyle 

* Added missing import

---------

Co-authored-by: David Kyle 
---
 docs/changelog/104155.yaml                    |  6 ++
 .../xpack/core/ml/utils/ExceptionsHelper.java |  4 +-
 .../services/elser/ElserMlNodeService.java    | 59 ++++++++++++++-----
 3 files changed, 52 insertions(+), 17 deletions(-)
 create mode 100644 docs/changelog/104155.yaml

diff --git a/docs/changelog/104155.yaml b/docs/changelog/104155.yaml
new file mode 100644
index 0000000000000..04d6a9920310a
--- /dev/null
+++ b/docs/changelog/104155.yaml
@@ -0,0 +1,6 @@
+pr: 104155
+summary: "Updated `missingTrainedModel` message to include: you may need to create\
+  \ it"
+area: Machine Learning
+type: enhancement
+issues: []
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/ExceptionsHelper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/ExceptionsHelper.java
index 33eac554d0129..57b08ad3f3e31 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/ExceptionsHelper.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/ExceptionsHelper.java
@@ -51,11 +51,11 @@ public static ResourceNotFoundException missingModelDeployment(String deployment
     }
 
     public static ResourceNotFoundException missingTrainedModel(String modelId) {
-        return new ResourceNotFoundException("No known trained model with model_id [{}]", modelId);
+        return new ResourceNotFoundException("No known trained model with model_id [{}], you may need to create it", modelId);
     }
 
     public static ResourceNotFoundException missingTrainedModel(String modelId, Exception cause) {
-        return new ResourceNotFoundException("No known trained model with model_id [{}]", cause, modelId);
+        return new ResourceNotFoundException("No known trained model with model_id [{}], you may need to create it", cause, modelId);
     }
 
     public static ElasticsearchException serverError(String msg) {
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java
index 61119dd13475a..6f1da745b3c23 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java
@@ -4,12 +4,13 @@
  * 2.0; you may not use this file except in compliance with the Elastic License
  * 2.0.
  *
- * this file was contributed to by a generative AI
+ * this file has been contributed to by a Generative AI
  */
 
 package org.elasticsearch.xpack.inference.services.elser;
 
 import org.elasticsearch.ElasticsearchStatusException;
+import org.elasticsearch.ResourceNotFoundException;
 import org.elasticsearch.TransportVersion;
 import org.elasticsearch.TransportVersions;
 import org.elasticsearch.action.ActionListener;
@@ -24,10 +25,12 @@
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.xpack.core.ClientHelper;
 import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults;
+import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction;
 import org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction;
 import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction;
 import org.elasticsearch.xpack.core.ml.action.StopTrainedModelDeploymentAction;
 import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextExpansionConfigUpdate;
+import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
 
 import java.io.IOException;
 import java.util.List;
@@ -73,16 +76,8 @@ public ElserMlNodeModel parseRequestConfig(
         Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS);
         var serviceSettingsBuilder = ElserMlNodeServiceSettings.fromMap(serviceSettingsMap);
 
-        // choose a default model version based on the cluster architecture
         if (serviceSettingsBuilder.getModelVariant() == null) {
-            boolean homogenous = modelArchitectures.size() == 1;
-            if (homogenous && modelArchitectures.iterator().next().equals("linux-x86_64")) {
-                // Use the hardware optimized model
-                serviceSettingsBuilder.setModelVariant(ELSER_V2_MODEL_LINUX_X86);
-            } else {
-                // default to the platform-agnostic model
-                serviceSettingsBuilder.setModelVariant(ELSER_V2_MODEL);
-            }
+            serviceSettingsBuilder.setModelVariant(selectDefaultModelVersionBasedOnClusterArchitecture(modelArchitectures));
         }
 
         Map taskSettingsMap;
@@ -102,6 +97,18 @@ public ElserMlNodeModel parseRequestConfig(
         return new ElserMlNodeModel(modelId, taskType, NAME, serviceSettingsBuilder.build(), taskSettings);
     }
 
+    private static String selectDefaultModelVersionBasedOnClusterArchitecture(Set modelArchitectures) {
+        // choose a default model version based on the cluster architecture
+        boolean homogenous = modelArchitectures.size() == 1;
+        if (homogenous && modelArchitectures.iterator().next().equals("linux-x86_64")) {
+            // Use the hardware optimized model
+            return ELSER_V2_MODEL_LINUX_X86;
+        } else {
+            // default to the platform-agnostic model
+            return ELSER_V2_MODEL;
+        }
+    }
+
     @Override
     public ElserMlNodeModel parsePersistedConfigWithSecrets(
         String modelId,
@@ -157,11 +164,33 @@ public void start(Model model, ActionListener listener) {
         startRequest.setThreadsPerAllocation(serviceSettings.getNumThreads());
         startRequest.setWaitForState(STARTED);
 
-        client.execute(
-            StartTrainedModelDeploymentAction.INSTANCE,
-            startRequest,
-            listener.delegateFailureAndWrap((l, r) -> l.onResponse(Boolean.TRUE))
-        );
+        client.execute(StartTrainedModelDeploymentAction.INSTANCE, startRequest, elserNotDownloadedListener(model, listener));
+    }
+
+    private static ActionListener elserNotDownloadedListener(
+        Model model,
+        ActionListener listener
+    ) {
+        return new ActionListener<>() {
+            @Override
+            public void onResponse(CreateTrainedModelAssignmentAction.Response response) {
+                listener.onResponse(Boolean.TRUE);
+            }
+
+            @Override
+            public void onFailure(Exception e) {
+                if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) {
+                    listener.onFailure(
+                        new ResourceNotFoundException(
+                            "Could not start the ELSER service as the ELSER model for this platform cannot be found."
+                                + " ELSER needs to be downloaded before it can be started"
+                        )
+                    );
+                    return;
+                }
+                listener.onFailure(e);
+            }
+        };
     }
 
     @Override

From 9bc579f942fad29b2136717a741476bddb36b995 Mon Sep 17 00:00:00 2001
From: Max Hniebergall <137079448+maxhniebergall@users.noreply.github.com>
Date: Thu, 11 Jan 2024 12:19:29 -0500
Subject: [PATCH 51/75] Disable elser download test case in inf IT (#104271)

---
 .../inference/InferenceBaseRestTest.java      | 20 -------------------
 .../xpack/inference/InferenceCrudIT.java      | 17 +++-------------
 2 files changed, 3 insertions(+), 34 deletions(-)

diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java
index 86f7bcdf92a81..c8af3fc64521f 100644
--- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java
+++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java
@@ -15,7 +15,6 @@
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.inference.TaskType;
-import org.elasticsearch.plugins.Platforms;
 import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.test.cluster.local.distribution.DistributionType;
 import org.elasticsearch.test.rest.ESRestTestCase;
@@ -65,25 +64,6 @@ static String mockServiceModelConfig() {
             """;
     }
 
-    protected Map downloadElserBlocking() throws IOException {
-        String endpoint = "_ml/trained_models/.elser_model_2?wait_for_completion=true";
-        if ("linux-x86_64".equals(Platforms.PLATFORM_NAME)) {
-            endpoint = "_ml/trained_models/.elser_model_2_linux-x86_64?wait_for_completion=true";
-        }
-        String body = """
-            {
-                "input": {
-                "field_names": ["text_field"]
-                }
-            }
-            """;
-        var request = new Request("PUT", endpoint);
-        request.setJsonEntity(body);
-        var response = client().performRequest(request);
-        assertOkOrCreated(response);
-        return entityAsMap(response);
-    }
-
     protected Map putModel(String modelId, String modelConfig, TaskType taskType) throws IOException {
         String endpoint = Strings.format("_inference/%s/%s", taskType, modelId);
         var request = new Request("PUT", endpoint);
diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java
index 62156f3e63b9e..ee99745d40090 100644
--- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java
+++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java
@@ -18,7 +18,6 @@
 
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.hasSize;
-import static org.hamcrest.Matchers.not;
 
 public class InferenceCrudIT extends InferenceBaseRestTest {
 
@@ -41,19 +40,9 @@ public void testElserCrud() throws IOException {
             expectThrows(ResponseException.class, () -> putModel(modelId, elserConfig, TaskType.SPARSE_EMBEDDING));
         }
 
-        downloadElserBlocking();
-
-        // Happy case
-        {
-            String modelId = randomAlphaOfLength(10).toLowerCase();
-            putModel(modelId, elserConfig, TaskType.SPARSE_EMBEDDING);
-            var models = getModels(modelId, TaskType.SPARSE_EMBEDDING);
-            assertThat(models.get("models").toString(), containsString("model_id=" + modelId));
-            deleteModel(modelId, TaskType.SPARSE_EMBEDDING);
-            expectThrows(ResponseException.class, () -> getModels(modelId, TaskType.SPARSE_EMBEDDING));
-            models = getTrainedModel("_all");
-            assertThat(models.toString(), not(containsString("deployment_id=" + modelId)));
-        }
+        // Happy Case
+        // We choose not to test the case where ELSER is downloaded to avoid causing excessive network traffic.
+        // This test case will be tested separately outside of CI
     }
 
     @SuppressWarnings("unchecked")

From 5b019c914395899dabb3a2f06195060380d9f350 Mon Sep 17 00:00:00 2001
From: Nhat Nguyen 
Date: Thu, 11 Jan 2024 09:23:18 -0800
Subject: [PATCH 52/75] Speed up resolving keep command (#104246)

We accidentally have O(N^3) when resolving the keep command.

The first loop:
https://github.com/elastic/elasticsearch/blob/8e3efae03df386dc80ab1a4ac3e620d16dc9828c/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java#L466

The second loop:
https://github.com/elastic/elasticsearch/blob/8e3efae03df386dc80ab1a4ac3e620d16dc9828c/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java#L467

The third loop:
https://github.com/elastic/elasticsearch/blob/8e3efae03df386dc80ab1a4ac3e620d16dc9828c/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/analyzer/AnalyzerRules.java#L160

Resolving 2000 fields can take 35 seconds on a fast machine and more
than 5 minutes on a slow machine. Although I think we should try to make
this linear if possible, this quick fix only changes the resolution to
O(N^2). This reduces the resolution time from 35s to 170ms (200 times
faster) for 2000 fields. This is good enough to re-enable the HeapAttack
tests.

Relates #104240
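
A simplified before/after sketch of the loop structure (illustrative only;
the real code tracks per-attribute priorities and handles `*` and wildcard
patterns, as in the diff below):

```java
// Before: for every attribute, every projection is re-resolved against the child output.
for (Attribute attribute : childOutput) {          // N
    for (var proj : projections) {                 // N
        resolveAgainstList(proj, childOutput);     // N  -> roughly O(N^3) overall
    }
}

// After: each projection is resolved exactly once against the child output.
for (var proj : projections) {                     // N
    resolveAgainstList(proj, childOutput);         // N  -> O(N^2) overall
}
```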
---
 .../xpack/esql/analysis/Analyzer.java         | 38 +++++++++----------
 1 file changed, 18 insertions(+), 20 deletions(-)

diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java
index 681027392c358..af5a0bd3f0b70 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java
@@ -463,26 +463,24 @@ private LogicalPlan resolveKeep(Project p, List childOutput) {
             // otherwise resolve them
             else {
                 Map priorities = new LinkedHashMap<>();
-                for (Attribute attribute : childOutput) {
-                    for (var proj : projections) {
-                        List resolved;
-                        int priority;
-                        if (proj instanceof UnresolvedStar) {
-                            resolved = childOutput;
-                            priority = 2;
-                        } else if (proj instanceof UnresolvedAttribute ua) {
-                            resolved = resolveAgainstList(ua, childOutput);
-                            priority = Regex.isSimpleMatchPattern(ua.name()) ? 1 : 0;
-                        } else {
-                            resolved = List.of(attribute);
-                            priority = 0;
-                        }
-                        for (Attribute attr : resolved) {
-                            Integer previousPrio = priorities.get(attr);
-                            if (previousPrio == null || previousPrio >= priority) {
-                                priorities.remove(attr);
-                                priorities.put(attr, priority);
-                            }
+                for (var proj : projections) {
+                    final List resolved;
+                    final int priority;
+                    if (proj instanceof UnresolvedStar) {
+                        resolved = childOutput;
+                        priority = 2;
+                    } else if (proj instanceof UnresolvedAttribute ua) {
+                        resolved = resolveAgainstList(ua, childOutput);
+                        priority = Regex.isSimpleMatchPattern(ua.name()) ? 1 : 0;
+                    } else {
+                        assert false : "unexpected projection: " + proj;
+                        throw new IllegalStateException("unexpected projection: " + proj);
+                    }
+                    for (Attribute attr : resolved) {
+                        Integer previousPrio = priorities.get(attr);
+                        if (previousPrio == null || previousPrio >= priority) {
+                            priorities.remove(attr);
+                            priorities.put(attr, priority);
                         }
                     }
                 }

From 52bd9e895c1daf37758c190ec27e1131271c5dd3 Mon Sep 17 00:00:00 2001
From: Nhat Nguyen 
Date: Thu, 11 Jan 2024 10:08:03 -0800
Subject: [PATCH 53/75] Unmute HeapAttack#testHugeManyConcat (#104241)

The recent heap attack test failures were caused by #104240: analyzing 2000
fields took more than 5 minutes in CI. With the fix in #104246, we can
re-enable this test now.

Relates #104240
---
 .../elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java   | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java
index 25e8dc7b220c0..578c29d210797 100644
--- a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java
+++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java
@@ -68,6 +68,8 @@ public class HeapAttackIT extends ESRestTestCase {
         .setting("xpack.license.self_generated.type", "trial")
         .build();
 
+    static volatile boolean SUITE_ABORTED = false;
+
     @Override
     protected String getTestRestCluster() {
         return cluster.getHttpAddresses();
@@ -219,7 +221,6 @@ public void testManyConcat() throws IOException {
     /**
      * Hits a circuit breaker by building many moderately long strings.
      */
-    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/104241")
     public void testHugeManyConcat() throws IOException {
         initManyLongs();
         assertCircuitBreaks(() -> manyConcat(2000));
@@ -319,6 +320,7 @@ public void onFailure(Exception e) {
 
                 @Override
                 protected void doRun() throws Exception {
+                    SUITE_ABORTED = true;
                     TimeValue elapsed = TimeValue.timeValueNanos(System.nanoTime() - startedTimeInNanos);
                     logger.info("--> test {} triggering OOM after {}", getTestName(), elapsed);
                     Request triggerOOM = new Request("POST", "/_trigger_out_of_memory");
@@ -550,6 +552,7 @@ private static void assertWriteResponse(Response response) throws IOException {
     @Before
     @After
     public void assertRequestBreakerEmpty() throws Exception {
+        assumeFalse("suite was aborted", SUITE_ABORTED);
         assertBusy(() -> {
             HttpEntity entity = adminClient().performRequest(new Request("GET", "/_nodes/stats")).getEntity();
             Map stats = XContentHelper.convertToMap(XContentType.JSON.xContent(), entity.getContent(), false);

From 10a84cbee9cadc5c7d3f0c4af53b9cb3b3ad3f63 Mon Sep 17 00:00:00 2001
From: Brian Seeders 
Date: Thu, 11 Jan 2024 16:13:37 -0500
Subject: [PATCH 54/75] [main] Add 8.11.4 release notes (#104276) (#104282)

---
 docs/reference/release-notes.asciidoc        |  2 ++
 docs/reference/release-notes/8.11.4.asciidoc | 31 ++++++++++++++++++++
 2 files changed, 33 insertions(+)
 create mode 100644 docs/reference/release-notes/8.11.4.asciidoc

diff --git a/docs/reference/release-notes.asciidoc b/docs/reference/release-notes.asciidoc
index 068cb3d2f127b..1aebf005a64e3 100644
--- a/docs/reference/release-notes.asciidoc
+++ b/docs/reference/release-notes.asciidoc
@@ -8,6 +8,7 @@ This section summarizes the changes in each release.
 
 * <>
 * <>
+* <>
 * <>
 * <>
 * <>
@@ -60,6 +61,7 @@ This section summarizes the changes in each release.
 
 include::release-notes/8.13.0.asciidoc[]
 include::release-notes/8.12.0.asciidoc[]
+include::release-notes/8.11.4.asciidoc[]
 include::release-notes/8.11.3.asciidoc[]
 include::release-notes/8.11.2.asciidoc[]
 include::release-notes/8.11.1.asciidoc[]
diff --git a/docs/reference/release-notes/8.11.4.asciidoc b/docs/reference/release-notes/8.11.4.asciidoc
new file mode 100644
index 0000000000000..0fd57c97b1a89
--- /dev/null
+++ b/docs/reference/release-notes/8.11.4.asciidoc
@@ -0,0 +1,31 @@
+[[release-notes-8.11.4]]
+== {es} version 8.11.4
+
+Also see <>.
+
+[[bug-8.11.4]]
+[float]
+=== Bug fixes
+
+EQL::
+* Fix NPE on missing event queries {es-pull}103611[#103611] (issue: {es-issue}103608[#103608])
+
+ES|QL::
+* Fix now in millis for ESQL search contexts {es-pull}103474[#103474] (issue: {es-issue}103455[#103455])
+* Fix the transport version of `PlanStreamOutput` {es-pull}103758[#103758]
+* `AsyncOperator#isFinished` must never return true on failure {es-pull}104029[#104029]
+
+Infra/Scripting::
+* Wrap painless explain error {es-pull}103151[#103151] (issue: {es-issue}103018[#103018])
+
+Mapping::
+* Revert change {es-pull}103865[#103865]
+
+Snapshot/Restore::
+* Decref `SharedBytes.IO` after read is done not before {es-pull}102848[#102848]
+* Restore `SharedBytes.IO` refcounting on reads & writes {es-pull}102843[#102843]
+
+Watcher::
+* Fix: Watcher REST API `GET /_watcher/settings` now includes product header {es-pull}103003[#103003] (issue: {es-issue}102928[#102928])
+
+

From a354b45613bc1b1170656e37684b62886638e191 Mon Sep 17 00:00:00 2001
From: Brian Seeders 
Date: Thu, 11 Jan 2024 16:17:47 -0500
Subject: [PATCH 55/75] [ci] Add Amazon Linux 2 to platform support pipeline
 (#104229)

---
 .buildkite/pipelines/periodic-platform-support.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.buildkite/pipelines/periodic-platform-support.yml b/.buildkite/pipelines/periodic-platform-support.yml
index faf904f2f8b04..0240fd03f4a89 100644
--- a/.buildkite/pipelines/periodic-platform-support.yml
+++ b/.buildkite/pipelines/periodic-platform-support.yml
@@ -89,6 +89,7 @@ steps:
           setup:
             image:
               - amazonlinux-2023
+              - amazonlinux-2
         agents:
           provider: aws
           imagePrefix: elasticsearch-{{matrix.image}}

From 89fd3d157ac7d6b8e68af2ad32e3d115035abc6c Mon Sep 17 00:00:00 2001
From: Mark Vieira 
Date: Thu, 11 Jan 2024 15:42:35 -0800
Subject: [PATCH 56/75] Mute TopNOperatorTests.testRandomMultiValuesTopN

---
 .../elasticsearch/compute/operator/topn/TopNOperatorTests.java   | 1 +
 1 file changed, 1 insertion(+)

diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java
index ba4f547d80ce1..22b17190c0355 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java
@@ -936,6 +936,7 @@ private void assertSortingOnMV(
         assertMap(actualValues, matchesList(List.of(expectedValues.subList(0, topCount))));
     }
 
+    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104167")
     public void testRandomMultiValuesTopN() {
         DriverContext driverContext = driverContext();
         int rows = randomIntBetween(50, 100);

From 35849bbcbbe5a4d6904152d99c3d11a806fa3483 Mon Sep 17 00:00:00 2001
From: Mark Vieira 
Date: Thu, 11 Jan 2024 15:45:58 -0800
Subject: [PATCH 57/75] AwaitsFix #102657

---
 .../xpack/ml/integration/BasicDistributedJobsIT.java             | 1 +
 1 file changed, 1 insertion(+)

diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java
index 822f8df35949e..6dbec53994b2e 100644
--- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java
+++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java
@@ -261,6 +261,7 @@ public void testDedicatedMlNode() throws Exception {
         });
     }
 
+    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102657")
     public void testMaxConcurrentJobAllocations() throws Exception {
         int numMlNodes = 2;
         internalCluster().ensureAtMostNumDataNodes(0);

From 6167afb6334c35a28aa0a922c22d4701c9aa18d0 Mon Sep 17 00:00:00 2001
From: Benjamin Trent 
Date: Thu, 11 Jan 2024 18:49:56 -0500
Subject: [PATCH 58/75] Suppress this-escape warnings (#104275)

making JDK21+ happy
---
 .../org/elasticsearch/xpack/security/authc/TokenService.java     | 1 +
 1 file changed, 1 insertion(+)

diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java
index 0bf0ab565d015..5a8b228a1145c 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java
@@ -234,6 +234,7 @@ public class TokenService {
     /**
      * Creates a new token service
      */
+    @SuppressWarnings("this-escape")
     public TokenService(
         Settings settings,
         Clock clock,

From ec8a227483e07519b5202fe91376769836e54fc7 Mon Sep 17 00:00:00 2001
From: Yang Wang 
Date: Fri, 12 Jan 2024 19:28:15 +1100
Subject: [PATCH 59/75] [Test] Remove mock appender before closing it
 (#104300)

The appender must be removed from the logger before it is closed. Otherwise,
the test sometimes tries to log additional messages after the appender has
been closed.

Resolves: #103782
---
 .../elasticsearch/http/AbstractHttpServerTransportTests.java   | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java
index 6c1e1293e099a..7f5c623dbae08 100644
--- a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java
+++ b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java
@@ -1013,7 +1013,6 @@ public void testStopForceClosesConnectionDuringRequest() throws Exception {
         }
     }
 
-    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103782")
     public void testStopClosesChannelAfterRequest() throws Exception {
         var grace = LONG_GRACE_PERIOD_MS;
         try (var noTimeout = LogExpectation.unexpectedTimeout(grace); var transport = new TestHttpServerTransport(gracePeriod(grace))) {
@@ -1390,8 +1389,8 @@ public void assertExpectationsMatched() {
 
         @Override
         public void close() {
-            appender.stop();
             Loggers.removeAppender(mockLogger, appender);
+            appender.stop();
             if (checked == false) {
                 fail("did not check expectations matched in TimedOutLogExpectation");
             }

From af9bac943d2b8be619085f41881f0e0edd9bce46 Mon Sep 17 00:00:00 2001
From: David Turner 
Date: Fri, 12 Jan 2024 09:24:17 +0000
Subject: [PATCH 60/75] Make `recoverLocallyUpToGlobalCheckpoint` async
 (#104266)

The flush at the end of translog recovery is potentially async, which we
adapt into today's sync `recoverLocallyUpToGlobalCheckpoint` by blocking
on a future. This is far from ideal, so with this commit we make it
fully non-blocking.
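
Part of the change is a new `ActionListener.runWithResource` helper (added in
the `ActionListener` hunk below); a minimal usage sketch, with the action body
and resource names assumed for illustration:

```java
// Acquire a resource, run a listener-based action with it, and release the resource
// once the wrapped listener completes; supplier failures go straight to listener.onFailure.
ActionListener.runWithResource(
    listener,                              // completed with the action's result or failure
    this::acquireEnsureOpenRef,            // resource supplier (a Releasable-like ref), may throw
    (l, ignoredRef) -> doRecoveryStep(l)   // hypothetical action; completing `l` releases the
                                           // resource before notifying `listener`
);
```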
---
 .../elasticsearch/action/ActionListener.java  |  22 ++-
 .../index/engine/InternalEngine.java          |  26 ++-
 .../index/engine/ReadOnlyEngine.java          |  12 +-
 .../elasticsearch/index/shard/IndexShard.java | 161 +++++++++++-------
 .../recovery/PeerRecoveryTargetService.java   |   9 +-
 .../action/ActionListenerTests.java           |  72 ++++++++
 .../index/shard/IndexShardTestCase.java       |   2 +-
 7 files changed, 214 insertions(+), 90 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/action/ActionListener.java b/server/src/main/java/org/elasticsearch/action/ActionListener.java
index aebe4922e416a..dea9c24190dad 100644
--- a/server/src/main/java/org/elasticsearch/action/ActionListener.java
+++ b/server/src/main/java/org/elasticsearch/action/ActionListener.java
@@ -349,7 +349,7 @@ public boolean equals(Object obj) {
     /**
      * Execute the given action in a {@code try/catch} block which feeds all exceptions to the given listener's {@link #onFailure} method.
      */
-    static > void run(L listener, CheckedConsumer action) {
+    static > void run(L listener, CheckedConsumer action) {
         try {
             action.accept(listener);
         } catch (Exception e) {
@@ -357,4 +357,24 @@ static > void run(L listener, CheckedConsumer void runWithResource(
+        ActionListener listener,
+        CheckedSupplier resourceSupplier,
+        CheckedBiConsumer, R, ? extends Exception> action
+    ) {
+        R resource;
+        try {
+            resource = resourceSupplier.get();
+        } catch (Exception e) {
+            safeOnFailure(listener, e);
+            return;
+        }
+
+        ActionListener.run(ActionListener.runBefore(listener, resource::close), l -> action.accept(l, resource));
+    }
+
 }
diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
index 08fc9e55fd408..8affee4330074 100644
--- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
+++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
@@ -564,21 +564,19 @@ private void bootstrapAppendOnlyInfoFromWriter(IndexWriter writer) {
 
     @Override
     public void recoverFromTranslog(TranslogRecoveryRunner translogRecoveryRunner, long recoverUpToSeqNo, ActionListener listener) {
-        ActionListener.run(listener, l -> {
-            try (var ignored = acquireEnsureOpenRef()) {
-                if (pendingTranslogRecovery.get() == false) {
-                    throw new IllegalStateException("Engine has already been recovered");
-                }
-                recoverFromTranslogInternal(translogRecoveryRunner, recoverUpToSeqNo, l.delegateResponse((ll, e) -> {
-                    try {
-                        pendingTranslogRecovery.set(true); // just play safe and never allow commits on this see #ensureCanFlush
-                        failEngine("failed to recover from translog", e);
-                    } catch (Exception inner) {
-                        e.addSuppressed(inner);
-                    }
-                    ll.onFailure(e);
-                }));
+        ActionListener.runWithResource(listener, this::acquireEnsureOpenRef, (l, ignoredRef) -> {
+            if (pendingTranslogRecovery.get() == false) {
+                throw new IllegalStateException("Engine has already been recovered");
             }
+            recoverFromTranslogInternal(translogRecoveryRunner, recoverUpToSeqNo, l.delegateResponse((ll, e) -> {
+                try {
+                    pendingTranslogRecovery.set(true); // just play safe and never allow commits on this see #ensureCanFlush
+                    failEngine("failed to recover from translog", e);
+                } catch (Exception inner) {
+                    e.addSuppressed(inner);
+                }
+                ll.onFailure(e);
+            }));
         });
     }
 
diff --git a/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java b/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java
index 7d5410cf488d7..b2326b749f970 100644
--- a/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java
+++ b/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java
@@ -525,13 +525,11 @@ public void recoverFromTranslog(
         final long recoverUpToSeqNo,
         ActionListener listener
     ) {
-        ActionListener.run(listener, l -> {
-            try (var ignored = acquireEnsureOpenRef()) {
-                try {
-                    translogRecoveryRunner.run(this, Translog.Snapshot.EMPTY);
-                } catch (final Exception e) {
-                    throw new EngineException(shardId, "failed to recover from empty translog snapshot", e);
-                }
+        ActionListener.runWithResource(listener, this::acquireEnsureOpenRef, (l, ignoredRef) -> {
+            try {
+                translogRecoveryRunner.run(this, Translog.Snapshot.EMPTY);
+            } catch (final Exception e) {
+                throw new EngineException(shardId, "failed to recover from empty translog snapshot", e);
             }
             l.onResponse(null);
         });
diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java
index aa6e3e1d45003..65291a99c25a3 100644
--- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java
+++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java
@@ -1740,59 +1740,87 @@ public void prepareForIndexRecovery() {
     }
 
     /**
-     * A best effort to bring up this shard to the global checkpoint using the local translog before performing a peer recovery.
+     * A best-effort attempt to bring up this shard to the global checkpoint using the local translog before performing a peer recovery.
      *
-     * @return a sequence number that an operation-based peer recovery can start with.
-     * This is the first operation after the local checkpoint of the safe commit if exists.
+     * @param recoveryStartingSeqNoListener a listener to be completed with the sequence number from which an operation-based peer recovery
+     *                                      can start. This is the first operation after the local checkpoint of the safe commit if exists.
      */
-    public long recoverLocallyUpToGlobalCheckpoint() {
-        assert Thread.holdsLock(mutex) == false : "recover locally under mutex";
+    public void recoverLocallyUpToGlobalCheckpoint(ActionListener recoveryStartingSeqNoListener) {
+        assert Thread.holdsLock(mutex) == false : "must not hold the mutex here";
         if (state != IndexShardState.RECOVERING) {
-            throw new IndexShardNotRecoveringException(shardId, state);
+            recoveryStartingSeqNoListener.onFailure(new IndexShardNotRecoveringException(shardId, state));
+            return;
+        }
+        try {
+            recoveryState.validateCurrentStage(RecoveryState.Stage.INDEX);
+        } catch (Exception e) {
+            recoveryStartingSeqNoListener.onFailure(e);
+            return;
         }
-        recoveryState.validateCurrentStage(RecoveryState.Stage.INDEX);
         assert routingEntry().recoverySource().getType() == RecoverySource.Type.PEER : "not a peer recovery [" + routingEntry() + "]";
-        final Optional safeCommit;
-        final long globalCheckpoint;
         try {
-            final String translogUUID = store.readLastCommittedSegmentsInfo().getUserData().get(Translog.TRANSLOG_UUID_KEY);
-            globalCheckpoint = Translog.readGlobalCheckpoint(translogConfig.getTranslogPath(), translogUUID);
-            safeCommit = store.findSafeIndexCommit(globalCheckpoint);
+            final var translogUUID = store.readLastCommittedSegmentsInfo().getUserData().get(Translog.TRANSLOG_UUID_KEY);
+            final var globalCheckpoint = Translog.readGlobalCheckpoint(translogConfig.getTranslogPath(), translogUUID);
+            final var safeCommit = store.findSafeIndexCommit(globalCheckpoint);
+            ActionListener.run(recoveryStartingSeqNoListener.delegateResponse((l, e) -> {
+                logger.debug(() -> format("failed to recover shard locally up to global checkpoint %s", globalCheckpoint), e);
+                l.onResponse(UNASSIGNED_SEQ_NO);
+            }), l -> doLocalRecovery(globalCheckpoint, safeCommit, l));
         } catch (org.apache.lucene.index.IndexNotFoundException e) {
             logger.trace("skip local recovery as no index commit found");
-            return UNASSIGNED_SEQ_NO;
+            recoveryStartingSeqNoListener.onResponse(UNASSIGNED_SEQ_NO);
         } catch (Exception e) {
             logger.debug("skip local recovery as failed to find the safe commit", e);
-            return UNASSIGNED_SEQ_NO;
+            recoveryStartingSeqNoListener.onResponse(UNASSIGNED_SEQ_NO);
         }
-        try {
-            maybeCheckIndex(); // check index here and won't do it again if ops-based recovery occurs
-            recoveryState.setLocalTranslogStage();
-            if (safeCommit.isPresent() == false) {
-                logger.trace("skip local recovery as no safe commit found");
-                return UNASSIGNED_SEQ_NO;
-            }
-            assert safeCommit.get().localCheckpoint <= globalCheckpoint : safeCommit.get().localCheckpoint + " > " + globalCheckpoint;
-            if (safeCommit.get().localCheckpoint == globalCheckpoint) {
-                logger.trace(
-                    "skip local recovery as the safe commit is up to date; safe commit {} global checkpoint {}",
-                    safeCommit.get(),
-                    globalCheckpoint
-                );
-                recoveryState.getTranslog().totalLocal(0);
-                return globalCheckpoint + 1;
-            }
-            if (indexSettings.getIndexMetadata().getState() == IndexMetadata.State.CLOSE
-                || IndexMetadata.INDEX_BLOCKS_WRITE_SETTING.get(indexSettings.getSettings())) {
-                logger.trace(
-                    "skip local recovery as the index was closed or not allowed to write; safe commit {} global checkpoint {}",
-                    safeCommit.get(),
-                    globalCheckpoint
-                );
-                recoveryState.getTranslog().totalLocal(0);
-                return safeCommit.get().localCheckpoint + 1;
-            }
-            try {
+    }
+
+    private void doLocalRecovery(
+        long globalCheckpoint,
+        @SuppressWarnings("OptionalUsedAsFieldOrParameterType") Optional safeCommit,
+        ActionListener recoveryStartingSeqNoListener
+    ) {
+        maybeCheckIndex(); // check index here and won't do it again if ops-based recovery occurs
+        recoveryState.setLocalTranslogStage();
+        if (safeCommit.isPresent() == false) {
+            logger.trace("skip local recovery as no safe commit found");
+            recoveryStartingSeqNoListener.onResponse(UNASSIGNED_SEQ_NO);
+            return;
+        }
+
+        assert safeCommit.get().localCheckpoint <= globalCheckpoint : safeCommit.get().localCheckpoint + " > " + globalCheckpoint;
+        if (safeCommit.get().localCheckpoint == globalCheckpoint) {
+            logger.trace(
+                "skip local recovery as the safe commit is up to date; safe commit {} global checkpoint {}",
+                safeCommit.get(),
+                globalCheckpoint
+            );
+            recoveryState.getTranslog().totalLocal(0);
+            recoveryStartingSeqNoListener.onResponse(globalCheckpoint + 1);
+            return;
+        }
+
+        if (indexSettings.getIndexMetadata().getState() == IndexMetadata.State.CLOSE
+            || IndexMetadata.INDEX_BLOCKS_WRITE_SETTING.get(indexSettings.getSettings())) {
+            logger.trace(
+                "skip local recovery as the index was closed or not allowed to write; safe commit {} global checkpoint {}",
+                safeCommit.get(),
+                globalCheckpoint
+            );
+            recoveryState.getTranslog().totalLocal(0);
+            recoveryStartingSeqNoListener.onResponse(safeCommit.get().localCheckpoint + 1);
+            return;
+        }
+
+        SubscribableListener
+            // First, start a temporary engine, recover the local translog up to the given checkpoint, and then close the engine again.
+            .newForked(l -> ActionListener.runWithResource(ActionListener.assertOnce(l), () -> () -> {
+                assert Thread.holdsLock(mutex) == false : "must not hold the mutex here";
+                synchronized (engineMutex) {
+                    IOUtils.close(currentEngineReference.getAndSet(null));
+                }
+            }, (recoveryCompleteListener, ignoredRef) -> {
+                assert Thread.holdsLock(mutex) == false : "must not hold the mutex here";
                 final Engine.TranslogRecoveryRunner translogRecoveryRunner = (engine, snapshot) -> {
                     recoveryState.getTranslog().totalLocal(snapshot.totalOperations());
                     final int recoveredOps = runTranslogRecovery(
@@ -1805,29 +1833,34 @@ public long recoverLocallyUpToGlobalCheckpoint() {
                     return recoveredOps;
                 };
                 innerOpenEngineAndTranslog(() -> globalCheckpoint);
-                getEngine().recoverFromTranslog(translogRecoveryRunner, globalCheckpoint);
-                logger.trace("shard locally recovered up to {}", getEngine().getSeqNoStats(globalCheckpoint));
-            } finally {
-                synchronized (engineMutex) {
-                    IOUtils.close(currentEngineReference.getAndSet(null));
+                getEngine().recoverFromTranslog(translogRecoveryRunner, globalCheckpoint, recoveryCompleteListener.map(v -> {
+                    logger.trace("shard locally recovered up to {}", getEngine().getSeqNoStats(globalCheckpoint));
+                    return v;
+                }));
+            }))
+            // If the recovery replayed any operations then it will have created a new safe commit for the specified global checkpoint,
+            // which we can use for the rest of the recovery, so now we load the safe commit and use its local checkpoint as the recovery
+            // starting point.
+            .andThenApply(ignored -> {
+                assert Thread.holdsLock(mutex) == false : "must not hold the mutex here";
+                try {
+                    // we need to find the safe commit again as we should have created a new one during the local recovery
+                    final Optional newSafeCommit = store.findSafeIndexCommit(globalCheckpoint);
+                    assert newSafeCommit.isPresent() : "no safe commit found after local recovery";
+                    return newSafeCommit.get().localCheckpoint + 1;
+                } catch (Exception e) {
+                    logger.debug(
+                        () -> format(
+                            "failed to find the safe commit after recovering shard locally up to global checkpoint %s",
+                            globalCheckpoint
+                        ),
+                        e
+                    );
+                    return UNASSIGNED_SEQ_NO;
                 }
-            }
-        } catch (Exception e) {
-            logger.debug(() -> format("failed to recover shard locally up to global checkpoint %s", globalCheckpoint), e);
-            return UNASSIGNED_SEQ_NO;
-        }
-        try {
-            // we need to find the safe commit again as we should have created a new one during the local recovery
-            final Optional<SequenceNumbers.CommitInfo> newSafeCommit = store.findSafeIndexCommit(globalCheckpoint);
-            assert newSafeCommit.isPresent() : "no safe commit found after local recovery";
-            return newSafeCommit.get().localCheckpoint + 1;
-        } catch (Exception e) {
-            logger.debug(
-                () -> format("failed to find the safe commit after recovering shard locally up to global checkpoint %s", globalCheckpoint),
-                e
-            );
-            return UNASSIGNED_SEQ_NO;
-        }
+            })
+
+            .addListener(recoveryStartingSeqNoListener);
     }
 
     public void trimOperationOfPreviousPrimaryTerms(long aboveSeqNo) {
diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java
index 487632546db21..a44c65f74c21d 100644
--- a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java
+++ b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java
@@ -379,8 +379,8 @@ record StartRecoveryRequestToSend(StartRecoveryRequest startRecoveryRequest, Str
             SubscribableListener
                 // run pre-recovery activities
                 .newForked(indexShard::preRecovery)
-                // recover the shard locally and construct the start-recovery request
-                .andThenApply(v -> {
+                // recover the shard as far as possible based on data held locally
+                .andThen((l, v) -> {
                     logger.trace("{} preparing shard for peer recovery", recoveryTarget.shardId());
                     indexShard.prepareForIndexRecovery();
                     if (indexShard.indexSettings().getIndexMetadata().isSearchableSnapshot()) {
@@ -394,7 +394,10 @@ record StartRecoveryRequestToSend(StartRecoveryRequest startRecoveryRequest, Str
                             store.decRef();
                         }
                     }
-                    final long startingSeqNo = indexShard.recoverLocallyUpToGlobalCheckpoint();
+                    indexShard.recoverLocallyUpToGlobalCheckpoint(ActionListener.assertOnce(l));
+                })
+                // now construct the start-recovery request
+                .andThenApply(startingSeqNo -> {
                     assert startingSeqNo == UNASSIGNED_SEQ_NO || recoveryTarget.state().getStage() == RecoveryState.Stage.TRANSLOG
                         : "unexpected recovery stage [" + recoveryTarget.state().getStage() + "] starting seqno [ " + startingSeqNo + "]";
                     final var startRequest = getStartRecoveryRequest(logger, clusterService.localNode(), recoveryTarget, startingSeqNo);
diff --git a/server/src/test/java/org/elasticsearch/action/ActionListenerTests.java b/server/src/test/java/org/elasticsearch/action/ActionListenerTests.java
index 6be145c6e9e33..3bdf5814878a7 100644
--- a/server/src/test/java/org/elasticsearch/action/ActionListenerTests.java
+++ b/server/src/test/java/org/elasticsearch/action/ActionListenerTests.java
@@ -8,6 +8,7 @@
 package org.elasticsearch.action;
 
 import org.apache.lucene.store.AlreadyClosedException;
+import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.support.PlainActionFuture;
 import org.elasticsearch.common.util.concurrent.AbstractRunnable;
 import org.elasticsearch.core.Assertions;
@@ -503,6 +504,77 @@ private static void completeListener(boolean successResponse, ActionListener
+    public void testRun() throws Exception {
+        final var successFuture = new PlainActionFuture<>();
+        final var successResult = new Object();
+        ActionListener.run(successFuture, l -> l.onResponse(successResult));
+        assertTrue(successFuture.isDone());
+        assertSame(successResult, successFuture.get());
+
+        final var failFuture = new PlainActionFuture<>();
+        final var failException = new ElasticsearchException("simulated");
+        ActionListener.run(failFuture, l -> {
+            if (randomBoolean()) {
+                l.onFailure(failException);
+            } else {
+                throw failException;
+            }
+        });
+        assertTrue(failFuture.isDone());
+        assertSame(failException, expectThrows(ExecutionException.class, ElasticsearchException.class, failFuture::get));
+    }
+
+    public void testRunWithResource() {
+        final var future = new PlainActionFuture<>();
+        final var successResult = new Object();
+        final var failException = new ElasticsearchException("simulated");
+        final var resourceIsClosed = new AtomicBoolean(false);
+        ActionListener.runWithResource(ActionListener.runBefore(future, () -> assertTrue(resourceIsClosed.get())), () -> new Releasable() {
+            @Override
+            public void close() {
+                assertTrue(resourceIsClosed.compareAndSet(false, true));
+            }
+
+            @Override
+            public String toString() {
+                return "test releasable";
+            }
+        }, (l, r) -> {
+            assertFalse(resourceIsClosed.get());
+            assertEquals("test releasable", r.toString());
+            if (randomBoolean()) {
+                l.onResponse(successResult);
+            } else {
+                if (randomBoolean()) {
+                    l.onFailure(failException);
+                } else {
+                    throw failException;
+                }
+            }
+        });
+
+        assertTrue(future.isDone());
+        try {
+            assertSame(successResult, future.get());
+        } catch (ExecutionException e) {
+            assertSame(failException, e.getCause());
+        } catch (InterruptedException e) {
+            fail(e);
+        }
+
+        final var failureFuture = new PlainActionFuture<>();
+        ActionListener.runWithResource(
+            failureFuture,
+            () -> { throw new ElasticsearchException("resource creation failure"); },
+            (l, r) -> fail("should not be called")
+        );
+        assertTrue(failureFuture.isDone());
+        assertEquals(
+            "resource creation failure",
+            expectThrows(ExecutionException.class, ElasticsearchException.class, failureFuture::get).getMessage()
+        );
+    }
+
     public void testReleaseAfter() {
         runReleaseAfterTest(true, false);
         runReleaseAfterTest(true, true);
diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java
index 1d0a476369be3..022b9a75f9846 100644
--- a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java
@@ -1160,6 +1160,6 @@ public static Engine.Warmer createTestWarmer(IndexSettings indexSettings) {
     }
 
     public static long recoverLocallyUpToGlobalCheckpoint(IndexShard indexShard) {
-        return indexShard.recoverLocallyUpToGlobalCheckpoint();
+        return PlainActionFuture.get(indexShard::recoverLocallyUpToGlobalCheckpoint, 10, TimeUnit.SECONDS);
     }
 }

From 00ca64bcf23a6dad9074e62f0a5d57af801272b0 Mon Sep 17 00:00:00 2001
From: Moritz Mack 
Date: Fri, 12 Jan 2024 10:30:04 +0100
Subject: [PATCH 61/75] Use allow-list for APM agent settings and consolidate
 defaults in APMJvmOptions (#104141)

Prevent invalid configuration and misconfiguration of the APM agent by using an explicit allow-list of setting keys.
Additionally, the configuration defaults from APMAgentSettings are consolidated in APMJvmOptions to keep defaults in a single location.
(ES-6916)
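
A minimal sketch (illustrative only, not part of this patch) of the allow-list approach: an agent
setting key is reduced to its last dot-separated segment and rejected unless it is explicitly
permitted. The class and the abbreviated key set here are hypothetical; the full set and the
per-key Setting validator are defined in APMAgentSettings below.

    import java.util.Set;

    class AgentSettingValidator {
        // Abbreviated, hypothetical allow-list; the patch defines the full set in APMAgentSettings.
        private static final Set<String> PERMITTED_KEYS = Set.of("transaction_sample_rate", "server_url", "log_level");

        // Reject any agent setting key that is not explicitly permitted.
        static String validate(String qualifiedKey, String value) {
            String[] parts = qualifiedKey.split("\\.");
            String key = parts[parts.length - 1];
            if (PERMITTED_KEYS.contains(key) == false) {
                throw new IllegalArgumentException("Configuration [" + qualifiedKey + "] is either prohibited or unknown.");
            }
            return value;
        }
    }

For example, validate("tracing.apm.agent.server_url", "http://127.0.0.1:9999") passes, while a key
such as secret_token is rejected, since credentials are supplied through the dedicated tracing.apm.*
settings instead.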
---
 .../gradle/testclusters/RunTask.java          |   6 +-
 .../server/cli/APMJvmOptions.java             |   3 +-
 .../apm/internal/APMAgentSettings.java        | 162 ++++++++++++++----
 .../apm/internal/APMAgentSettingsTests.java   |  43 ++---
 4 files changed, 145 insertions(+), 69 deletions(-)

diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java
index ca2cbc09f7c2f..746a09d242761 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java
@@ -201,7 +201,7 @@ public void beforeStart() {
                     try {
                         mockServer.start();
                         node.setting("telemetry.metrics.enabled", "true");
-                        node.setting("tracing.apm.agent.enabled", "true");
+                        node.setting("tracing.apm.enabled", "true");
                         node.setting("tracing.apm.agent.transaction_sample_rate", "0.10");
                         node.setting("tracing.apm.agent.metrics_interval", "10s");
                         node.setting("tracing.apm.agent.server_url", "http://127.0.0.1:" + mockServer.getPort());
@@ -213,8 +213,8 @@ public void beforeStart() {
                 // if metrics were not enabled explicitly for gradlew run we should disable them
                 else if (node.getSettingKeys().contains("telemetry.metrics.enabled") == false) { // metrics
                     node.setting("telemetry.metrics.enabled", "false");
-                } else if (node.getSettingKeys().contains("tracing.apm.agent.enabled") == false) { // tracing
-                    node.setting("tracing.apm.agent.enable", "false");
+                } else if (node.getSettingKeys().contains("tracing.apm.enabled") == false) { // tracing
+                    node.setting("tracing.apm.enable", "false");
                 }
 
             }
diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java
index 9dcd630f52631..8531e22447a2d 100644
--- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java
+++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java
@@ -79,7 +79,8 @@ class APMJvmOptions {
         "application_packages", "org.elasticsearch,org.apache.lucene",
         "metrics_interval", "120s",
         "breakdown_metrics", "false",
-        "central_config", "false"
+        "central_config", "false",
+        "transaction_sample_rate", "0.2"
         );
     // end::noformat
 
diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java
index 41816318a3586..0ee13dae70740 100644
--- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java
+++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java
@@ -22,8 +22,8 @@
 import java.security.AccessController;
 import java.security.PrivilegedAction;
 import java.util.List;
-import java.util.Map;
 import java.util.Objects;
+import java.util.Set;
 
 import static org.elasticsearch.common.settings.Setting.Property.NodeScope;
 import static org.elasticsearch.common.settings.Setting.Property.OperatorDynamic;
@@ -36,17 +36,6 @@ public class APMAgentSettings {
 
     private static final Logger LOGGER = LogManager.getLogger(APMAgentSettings.class);
 
-    /**
-     * Sensible defaults that Elasticsearch configures. This cannot be done via the APM agent
-     * config file, as then their values could not be overridden dynamically via system properties.
-     */
-    static Map<String, String> APM_AGENT_DEFAULT_SETTINGS = Map.of(
-        "transaction_sample_rate",
-        "0.2",
-        "enable_experimental_instrumentations",
-        "true"
-    );
-
     public void addClusterSettingsListeners(
         ClusterService clusterService,
         APMTelemetryProvider apmTelemetryProvider,
@@ -77,16 +66,7 @@ public void addClusterSettingsListeners(
      */
     public void syncAgentSystemProperties(Settings settings) {
         this.setAgentSetting("recording", Boolean.toString(APM_ENABLED_SETTING.get(settings)));
-
-        // Apply default values for some system properties. Although we configure
-        // the settings in APM_AGENT_DEFAULT_SETTINGS to defer to the default values, they won't
-        // do anything if those settings are never configured.
-        APM_AGENT_DEFAULT_SETTINGS.keySet()
-            .forEach(
-                key -> this.setAgentSetting(key, APM_AGENT_SETTINGS.getConcreteSetting(APM_AGENT_SETTINGS.getKey() + key).get(settings))
-            );
-
-        // Then apply values from the settings in the cluster state
+        // Apply values from the settings in the cluster state
         APM_AGENT_SETTINGS.getAsMap(settings).forEach(this::setAgentSetting);
     }
 
@@ -114,15 +94,130 @@ public void setAgentSetting(String key, String value) {
     private static final String APM_SETTING_PREFIX = "tracing.apm.";
 
     /**
-     * A list of APM agent config keys that should never be configured by the user.
+     * Allow-list of APM agent config keys users are permitted to configure.
+     * @see APM Java Agent Configuration
      */
-    private static final List<String> PROHIBITED_AGENT_KEYS = List.of(
-        // ES generates a config file and sets this value
-        "config_file",
-        // ES controls this via `telemetry.metrics.enabled`
-        "recording",
-        // ES controls this via `apm.enabled`
-        "instrument"
+    private static final Set<String> PERMITTED_AGENT_KEYS = Set.of(
+        // Circuit-Breaker:
+        "circuit_breaker_enabled",
+        "stress_monitoring_interval",
+        "stress_monitor_gc_stress_threshold",
+        "stress_monitor_gc_relief_threshold",
+        "stress_monitor_cpu_duration_threshold",
+        "stress_monitor_system_cpu_stress_threshold",
+        "stress_monitor_system_cpu_relief_threshold",
+
+        // Core:
+        // forbid 'enabled', must remain enabled to dynamically enable tracing / metrics
+        // forbid 'recording' / 'instrument', controlled by 'telemetry.metrics.enabled' / 'tracing.apm.enabled'
+        "service_name",
+        "service_node_name",
+        // forbid 'service_version', forced by APMJvmOptions
+        "hostname",
+        "environment",
+        "transaction_sample_rate",
+        "transaction_max_spans",
+        "long_field_max_length",
+        "sanitize_field_names",
+        "enable_instrumentations",
+        "disable_instrumentations",
+        // forbid 'enable_experimental_instrumentations', expected to be always enabled by APMJvmOptions
+        "unnest_exceptions",
+        "ignore_exceptions",
+        "capture_body",
+        "capture_headers",
+        "global_labels",
+        "instrument_ancient_bytecode",
+        "context_propagation_only",
+        "classes_excluded_from_instrumentation",
+        "trace_methods",
+        "trace_methods_duration_threshold",
+        // forbid 'central_config', may impact usage of config_file, disabled in APMJvmOptions
+        // forbid 'config_file', configured by APMJvmOptions
+        "breakdown_metrics",
+        "plugins_dir",
+        "use_elastic_traceparent_header",
+        "disable_outgoing_tracecontext_headers",
+        "span_min_duration",
+        "cloud_provider",
+        "enable_public_api_annotation_inheritance",
+        "transaction_name_groups",
+        "trace_continuation_strategy",
+        "baggage_to_attach",
+
+        // Datastore: irrelevant, not whitelisted
+
+        // HTTP:
+        "capture_body_content_types",
+        "transaction_ignore_urls",
+        "transaction_ignore_user_agents",
+        "use_path_as_transaction_name",
+        // forbid deprecated url_groups
+
+        // Huge Traces:
+        "span_compression_enabled",
+        "span_compression_exact_match_max_duration",
+        "span_compression_same_kind_max_duration",
+        "exit_span_min_duration",
+
+        // JAX-RS: irrelevant, not whitelisted
+
+        // JMX:
+        "capture_jmx_metrics",
+
+        // Logging:
+        "log_level", // allow overriding the default in APMJvmOptions
+        // forbid log_file, always set by APMJvmOptions
+        "log_ecs_reformatting",
+        "log_ecs_reformatting_additional_fields",
+        "log_ecs_formatter_allow_list",
+        // forbid log_ecs_reformatting_dir, always use logsDir provided in APMJvmOptions
+        "log_file_size",
+        // forbid log_format_sout, always use file logging
+        // forbid log_format_file, expected to be JSON in APMJvmOptions
+        "log_sending",
+
+        // Messaging: irrelevant, not whitelisted
+
+        // Metrics:
+        "dedot_custom_metrics",
+        "custom_metrics_histogram_boundaries",
+        "metric_set_limit",
+        "agent_reporter_health_metrics",
+        "agent_background_overhead_metrics",
+
+        // Profiling:
+        "profiling_inferred_spans_enabled",
+        "profiling_inferred_spans_logging_enabled",
+        "profiling_inferred_spans_sampling_interval",
+        "profiling_inferred_spans_min_duration",
+        "profiling_inferred_spans_included_classes",
+        "profiling_inferred_spans_excluded_classes",
+        "profiling_inferred_spans_lib_directory",
+
+        // Reporter:
+        // forbid secret_token: use tracing.apm.secret_token instead
+        // forbid api_key: use tracing.apm.api_key instead
+        "server_url",
+        "server_urls",
+        "disable_send",
+        "server_timeout",
+        "verify_server_cert",
+        "max_queue_size",
+        "include_process_args",
+        "api_request_time",
+        "api_request_size",
+        "metrics_interval",
+        "disable_metrics",
+
+        // Serverless:
+        "aws_lambda_handler",
+        "data_flush_timeout",
+
+        // Stacktraces:
+        "application_packages",
+        "stack_trace_limit",
+        "span_stack_trace_min_duration"
     );
 
     public static final Setting.AffixSetting<String> APM_AGENT_SETTINGS = Setting.prefixKeySetting(
@@ -130,10 +225,9 @@ public void setAgentSetting(String key, String value) {
         (qualifiedKey) -> {
             final String[] parts = qualifiedKey.split("\\.");
             final String key = parts[parts.length - 1];
-            final String defaultValue = APM_AGENT_DEFAULT_SETTINGS.getOrDefault(key, "");
-            return new Setting<>(qualifiedKey, defaultValue, (value) -> {
-                if (PROHIBITED_AGENT_KEYS.contains(key)) {
-                    throw new IllegalArgumentException("Explicitly configuring [" + qualifiedKey + "] is prohibited");
+            return new Setting<>(qualifiedKey, "", (value) -> {
+                if (qualifiedKey.equals("_na_") == false && PERMITTED_AGENT_KEYS.contains(key) == false) {
+                    throw new IllegalArgumentException("Configuration [" + qualifiedKey + "] is either prohibited or unknown.");
                 }
                 return value;
             }, Setting.Property.NodeScope, Setting.Property.OperatorDynamic);
diff --git a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettingsTests.java b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettingsTests.java
index b22a57bb9bf0c..7457b97eebdde 100644
--- a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettingsTests.java
+++ b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettingsTests.java
@@ -11,8 +11,8 @@
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.test.ESTestCase;
 
+import static org.hamcrest.Matchers.containsString;
 import static org.mockito.Mockito.spy;
-import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
 
 public class APMAgentSettingsTests extends ESTestCase {
@@ -20,7 +20,7 @@ public class APMAgentSettingsTests extends ESTestCase {
     /**
      * Check that when the tracer is enabled, it also sets the APM agent's recording system property to true.
      */
-    public void test_whenTracerEnabled_setsRecordingProperty() {
+    public void testEnableRecording() {
         APMAgentSettings apmAgentSettings = spy(new APMAgentSettings());
         Settings settings = Settings.builder().put(APMAgentSettings.APM_ENABLED_SETTING.getKey(), true).build();
         apmAgentSettings.syncAgentSystemProperties(settings);
@@ -31,7 +31,7 @@ public void test_whenTracerEnabled_setsRecordingProperty() {
     /**
      * Check that when the tracer is disabled, it also sets the APM agent's recording system property to false.
      */
-    public void test_whenTracerDisabled_setsRecordingProperty() {
+    public void testDisableRecording() {
         APMAgentSettings apmAgentSettings = spy(new APMAgentSettings());
         Settings settings = Settings.builder().put(APMAgentSettings.APM_ENABLED_SETTING.getKey(), false).build();
         apmAgentSettings.syncAgentSystemProperties(settings);
@@ -40,48 +40,29 @@ public void test_whenTracerDisabled_setsRecordingProperty() {
     }
 
     /**
-     * Check that when cluster settings are synchronised with the system properties, default values are
-     * applied.
+     * Check that when cluster settings are synchronised with the system properties, agent settings are set.
      */
-    public void test_whenTracerCreated_defaultSettingsApplied() {
-        APMAgentSettings apmAgentSettings = spy(new APMAgentSettings());
-        Settings settings = Settings.builder().put(APMAgentSettings.APM_ENABLED_SETTING.getKey(), true).build();
-        apmAgentSettings.syncAgentSystemProperties(settings);
-
-        verify(apmAgentSettings).setAgentSetting("transaction_sample_rate", "0.2");
-    }
-
-    /**
-     * Check that when cluster settings are synchronised with the system properties, values in the settings
-     * are reflected in the system properties, overwriting default values.
-     */
-    public void test_whenTracerCreated_clusterSettingsOverrideDefaults() {
+    public void testSetAgentSettings() {
         APMAgentSettings apmAgentSettings = spy(new APMAgentSettings());
         Settings settings = Settings.builder()
             .put(APMAgentSettings.APM_ENABLED_SETTING.getKey(), true)
-            .put(APMAgentSettings.APM_AGENT_SETTINGS.getKey() + "transaction_sample_rate", "0.75")
+            .put(APMAgentSettings.APM_AGENT_SETTINGS.getKey() + "span_compression_enabled", "true")
             .build();
         apmAgentSettings.syncAgentSystemProperties(settings);
 
-        // This happens twice because we first apply the default settings, whose values are overridden
-        // from the cluster settings, then we apply all the APM-agent related settings, not just the
-        // ones with default values. Although there is some redundancy here, it only happens at startup
-        // for a very small number of settings.
-        verify(apmAgentSettings, times(2)).setAgentSetting("transaction_sample_rate", "0.75");
+        verify(apmAgentSettings).setAgentSetting("span_compression_enabled", "true");
     }
 
     /**
-     * Check that when cluster settings are synchronised with the system properties, agent settings other
-     * than those with default values are set.
+     * Check that invalid or forbidden APM agent settings are rejected.
      */
-    public void test_whenTracerCreated_clusterSettingsAlsoApplied() {
-        APMAgentSettings apmAgentSettings = spy(new APMAgentSettings());
+    public void testRejectForbiddenOrUnknownSettings() {
         Settings settings = Settings.builder()
             .put(APMAgentSettings.APM_ENABLED_SETTING.getKey(), true)
-            .put(APMAgentSettings.APM_AGENT_SETTINGS.getKey() + "span_compression_enabled", "true")
+            .put(APMAgentSettings.APM_AGENT_SETTINGS.getKey() + "unknown", "true")
             .build();
-        apmAgentSettings.syncAgentSystemProperties(settings);
 
-        verify(apmAgentSettings).setAgentSetting("span_compression_enabled", "true");
+        Exception exception = expectThrows(IllegalArgumentException.class, () -> APMAgentSettings.APM_AGENT_SETTINGS.getAsMap(settings));
+        assertThat(exception.getMessage(), containsString("[tracing.apm.agent.unknown]"));
     }
 }

From 1b2e8cf5a2b6faa8386905aaa42911bf20f2c9cb Mon Sep 17 00:00:00 2001
From: David Kyle 
Date: Fri, 12 Jan 2024 09:34:35 +0000
Subject: [PATCH 62/75] [ML] Refactor InferenceConfigUpdate for simpler changes
 to tokenization options (#104277)

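For orientation only, a hedged sketch of the shape of the refactor visible in the diff below: the
update no longer implements apply(InferenceConfig); instead each InferenceConfig applies a
compatible update to a copy of itself and raises an incompatible-update error otherwise. The types
here are hypothetical stand-ins, not the real x-pack classes.

    // Illustrative stand-ins; the real interfaces live in the x-pack ml trainedmodel package.
    interface InferenceConfigUpdate {
        String getName();

        String getResultsField();
    }

    interface InferenceConfig {
        String getName();

        // Each concrete config merges a compatible update into a copy of itself.
        InferenceConfig apply(InferenceConfigUpdate update);

        default RuntimeException incompatibleUpdateException(String updateName) {
            return new IllegalArgumentException(
                "Inference config of type [" + getName() + "] can not be updated with an inference request of type [" + updateName + "]"
            );
        }
    }

    // Hypothetical concrete config: null fields in the update mean "keep the current value".
    record ExampleConfig(String resultsField) implements InferenceConfig {
        public String getName() {
            return "example";
        }

        public InferenceConfig apply(InferenceConfigUpdate update) {
            if (update instanceof ExampleUpdate u) {
                return new ExampleConfig(u.getResultsField() != null ? u.getResultsField() : resultsField);
            }
            throw incompatibleUpdateException(update.getName());
        }
    }

    record ExampleUpdate(String resultsField) implements InferenceConfigUpdate {
        public String getName() {
            return "example_update";
        }

        public String getResultsField() {
            return resultsField;
        }
    }
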
---
 .../BertJapaneseTokenization.java             |  5 ++
 .../trainedmodel/BertTokenization.java        |  5 ++
 .../trainedmodel/ClassificationConfig.java    | 27 +++++++
 .../ClassificationConfigUpdate.java           | 41 ----------
 .../trainedmodel/EmptyConfigUpdate.java       |  8 +-
 .../trainedmodel/FillMaskConfig.java          | 22 +++++
 .../trainedmodel/FillMaskConfigUpdate.java    | 35 --------
 .../trainedmodel/InferenceConfig.java         | 18 +++++
 .../trainedmodel/InferenceConfigUpdate.java   | 10 ++-
 .../trainedmodel/MPNetTokenization.java       |  5 ++
 .../ml/inference/trainedmodel/NerConfig.java  | 17 ++++
 .../trainedmodel/NerConfigUpdate.java         | 23 ------
 .../trainedmodel/NullInferenceConfig.java     |  5 ++
 .../trainedmodel/PassThroughConfig.java       | 16 ++++
 .../trainedmodel/PassThroughConfigUpdate.java | 24 ------
 .../trainedmodel/QuestionAnsweringConfig.java | 26 ++++++
 .../QuestionAnsweringConfigUpdate.java        | 32 --------
 .../trainedmodel/RegressionConfig.java        | 18 +++++
 .../trainedmodel/RegressionConfigUpdate.java  | 30 -------
 .../trainedmodel/ResultsFieldUpdate.java      | 17 ----
 .../trainedmodel/RobertaTokenization.java     | 12 +++
 .../TextClassificationConfig.java             | 34 ++++++++
 .../TextClassificationConfigUpdate.java       | 48 -----------
 .../trainedmodel/TextEmbeddingConfig.java     | 17 ++++
 .../TextEmbeddingConfigUpdate.java            | 23 ------
 .../trainedmodel/TextExpansionConfig.java     | 16 ++++
 .../TextExpansionConfigUpdate.java            | 28 -------
 .../trainedmodel/TextSimilarityConfig.java    | 18 +++++
 .../TextSimilarityConfigUpdate.java           | 26 +-----
 .../inference/trainedmodel/Tokenization.java  | 77 ++++++++++++++----
 .../TokenizationConfigUpdate.java             | 81 +++++++++++++++++++
 .../trainedmodel/XLMRobertaTokenization.java  |  5 ++
 .../ZeroShotClassificationConfig.java         | 37 +++++++++
 .../ZeroShotClassificationConfigUpdate.java   | 40 ---------
 .../BertJapaneseTokenizationTests.java        |  7 ++
 .../trainedmodel/BertTokenizationTests.java   |  7 ++
 .../ClassificationConfigUpdateTests.java      | 19 ++---
 .../FillMaskConfigUpdateTests.java            | 34 ++------
 .../trainedmodel/NerConfigUpdateTests.java    | 13 +--
 .../PassThroughConfigUpdateTests.java         | 13 +--
 .../QuestionAnsweringConfigUpdateTests.java   | 27 ++++---
 .../RegressionConfigUpdateTests.java          | 11 ++-
 .../trainedmodel/ResultsFieldUpdateTests.java |  4 +-
 .../RobertaTokenizationTests.java             |  7 ++
 .../TextClassificationConfigUpdateTests.java  | 54 +++----------
 .../TextEmbeddingConfigUpdateTests.java       | 13 +--
 .../TextSimilarityConfigUpdateTests.java      | 18 ++---
 .../TokenizationConfigUpdateTests.java        | 32 ++++++++
 ...roShotClassificationConfigUpdateTests.java | 46 +++--------
 .../TrainedModelDeploymentTask.java           | 12 +--
 .../inference/loadingservice/LocalModel.java  |  5 +-
 .../ZeroShotClassificationProcessorTests.java |  2 +-
 52 files changed, 614 insertions(+), 556 deletions(-)
 create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TokenizationConfigUpdate.java
 create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TokenizationConfigUpdateTests.java

diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertJapaneseTokenization.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertJapaneseTokenization.java
index 269d803a698bf..392258608acd1 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertJapaneseTokenization.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertJapaneseTokenization.java
@@ -59,6 +59,11 @@ public BertJapaneseTokenization(StreamInput in) throws IOException {
         super(in);
     }
 
+    @Override
+    Tokenization buildWindowingTokenization(int updatedMaxSeqLength, int updatedSpan) {
+        return new BertJapaneseTokenization(this.doLowerCase, this.withSpecialTokens, updatedMaxSeqLength, Truncate.NONE, updatedSpan);
+    }
+
     XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
         return builder;
     }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertTokenization.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertTokenization.java
index a950a18fcab2b..a229227df60e9 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertTokenization.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertTokenization.java
@@ -60,6 +60,11 @@ public BertTokenization(StreamInput in) throws IOException {
         super(in);
     }
 
+    @Override
+    Tokenization buildWindowingTokenization(int updatedMaxSeqLength, int updatedSpan) {
+        return new BertTokenization(this.doLowerCase, this.withSpecialTokens, updatedMaxSeqLength, Truncate.NONE, updatedSpan);
+    }
+
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfig.java
index 156fd76a9419c..cc66c361925a6 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfig.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfig.java
@@ -109,6 +109,33 @@ public ClassificationConfig(StreamInput in) throws IOException {
         this.predictionFieldType = PredictionFieldType.fromStream(in);
     }
 
+    @Override
+    public InferenceConfig apply(InferenceConfigUpdate update) {
+        if (update instanceof ClassificationConfigUpdate configUpdate) {
+            ClassificationConfig.Builder builder = new ClassificationConfig.Builder(this);
+            if (configUpdate.getResultsField() != null) {
+                builder.setResultsField(configUpdate.getResultsField());
+            }
+            if (configUpdate.getNumTopFeatureImportanceValues() != null) {
+                builder.setNumTopFeatureImportanceValues(configUpdate.getNumTopFeatureImportanceValues());
+            }
+            if (configUpdate.getTopClassesResultsField() != null) {
+                builder.setTopClassesResultsField(configUpdate.getTopClassesResultsField());
+            }
+            if (configUpdate.getNumTopClasses() != null) {
+                builder.setNumTopClasses(configUpdate.getNumTopClasses());
+            }
+            if (configUpdate.getPredictionFieldType() != null) {
+                builder.setPredictionFieldType(configUpdate.getPredictionFieldType());
+            }
+            return builder.build();
+        } else if (update instanceof ResultsFieldUpdate resultsFieldUpdate) {
+            return new ClassificationConfig.Builder(this).setResultsField(resultsFieldUpdate.getResultsField()).build();
+        } else {
+            throw incompatibleUpdateException(update.getName());
+        }
+    }
+
     public int getNumTopClasses() {
         return numTopClasses;
     }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdate.java
index a036427abbe48..de4004792af7c 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdate.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdate.java
@@ -203,52 +203,11 @@ public String getName() {
         return NAME.getPreferredName();
     }
 
-    @Override
-    public InferenceConfig apply(InferenceConfig originalConfig) {
-        if (originalConfig instanceof ClassificationConfig == false) {
-            throw ExceptionsHelper.badRequestException(
-                "Inference config of type [{}] can not be updated with a inference request of type [{}]",
-                originalConfig.getName(),
-                getName()
-            );
-        }
-        ClassificationConfig classificationConfig = (ClassificationConfig) originalConfig;
-
-        if (isNoop(classificationConfig)) {
-            return originalConfig;
-        }
-        ClassificationConfig.Builder builder = new ClassificationConfig.Builder(classificationConfig);
-        if (resultsField != null) {
-            builder.setResultsField(resultsField);
-        }
-        if (numTopFeatureImportanceValues != null) {
-            builder.setNumTopFeatureImportanceValues(numTopFeatureImportanceValues);
-        }
-        if (topClassesResultsField != null) {
-            builder.setTopClassesResultsField(topClassesResultsField);
-        }
-        if (numTopClasses != null) {
-            builder.setNumTopClasses(numTopClasses);
-        }
-        if (predictionFieldType != null) {
-            builder.setPredictionFieldType(predictionFieldType);
-        }
-        return builder.build();
-    }
-
     @Override
     public boolean isSupported(InferenceConfig inferenceConfig) {
         return inferenceConfig instanceof ClassificationConfig;
     }
 
-    boolean isNoop(ClassificationConfig originalConfig) {
-        return (resultsField == null || resultsField.equals(originalConfig.getResultsField()))
-            && (numTopFeatureImportanceValues == null || originalConfig.getNumTopFeatureImportanceValues() == numTopFeatureImportanceValues)
-            && (topClassesResultsField == null || topClassesResultsField.equals(originalConfig.getTopClassesResultsField()))
-            && (numTopClasses == null || originalConfig.getNumTopClasses() == numTopClasses)
-            && (predictionFieldType == null || predictionFieldType.equals(originalConfig.getPredictionFieldType()));
-    }
-
     @Override
     public TransportVersion getMinimalSupportedVersion() {
         return TransportVersions.V_7_8_0;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/EmptyConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/EmptyConfigUpdate.java
index c098b13fd1deb..feb3a2e3191ff 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/EmptyConfigUpdate.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/EmptyConfigUpdate.java
@@ -30,13 +30,13 @@ public EmptyConfigUpdate() {}
     public EmptyConfigUpdate(StreamInput in) {}
 
     @Override
-    public String getResultsField() {
-        return null;
+    public boolean isEmpty() {
+        return true;
     }
 
     @Override
-    public InferenceConfig apply(InferenceConfig originalConfig) {
-        return originalConfig;
+    public String getResultsField() {
+        return null;
     }
 
     @Override
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfig.java
index 24b7a95c9ccac..ab45c2f420bd9 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfig.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfig.java
@@ -99,6 +99,28 @@ public FillMaskConfig(StreamInput in) throws IOException {
         resultsField = in.readOptionalString();
     }
 
+    @Override
+    public InferenceConfig apply(InferenceConfigUpdate update) {
+        if (update instanceof FillMaskConfigUpdate configUpdate) {
+            FillMaskConfig.Builder builder = new FillMaskConfig.Builder(this);
+            if (configUpdate.getNumTopClasses() != null) {
+                builder.setNumTopClasses(configUpdate.getNumTopClasses());
+            }
+            if (configUpdate.getResultsField() != null) {
+                builder.setResultsField(configUpdate.getResultsField());
+            }
+            if (configUpdate.getTokenizationUpdate() != null) {
+                builder.setTokenization(configUpdate.getTokenizationUpdate().apply(this.getTokenization()));
+            }
+            return builder.build();
+        } else if (update instanceof TokenizationConfigUpdate tokenizationUpdate) {
+            FillMaskConfig.Builder builder = new FillMaskConfig.Builder(this);
+            return builder.setTokenization(this.getTokenization().updateSpanSettings(tokenizationUpdate.getSpanSettings())).build();
+        } else {
+            throw incompatibleUpdateException(update.getName());
+        }
+    }
+
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject();
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfigUpdate.java
index cb081aa48d0a2..9ac4ea9cf18e7 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfigUpdate.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfigUpdate.java
@@ -108,41 +108,6 @@ public TransportVersion getMinimalSupportedVersion() {
         return TransportVersions.V_8_0_0;
     }
 
-    @Override
-    public InferenceConfig apply(InferenceConfig originalConfig) {
-        if (originalConfig instanceof FillMaskConfig == false) {
-            throw ExceptionsHelper.badRequestException(
-                "Inference config of type [{}] can not be updated with a request of type [{}]",
-                originalConfig.getName(),
-                getName()
-            );
-        }
-
-        FillMaskConfig fillMaskConfig = (FillMaskConfig) originalConfig;
-        if (isNoop(fillMaskConfig)) {
-            return originalConfig;
-        }
-
-        FillMaskConfig.Builder builder = new FillMaskConfig.Builder(fillMaskConfig);
-        if (numTopClasses != null) {
-            builder.setNumTopClasses(numTopClasses);
-        }
-        if (resultsField != null) {
-            builder.setResultsField(resultsField);
-        }
-        if (tokenizationUpdate != null) {
-            builder.setTokenization(tokenizationUpdate.apply(fillMaskConfig.getTokenization()));
-
-        }
-        return builder.build();
-    }
-
-    boolean isNoop(FillMaskConfig originalConfig) {
-        return (this.numTopClasses == null || this.numTopClasses == originalConfig.getNumTopClasses())
-            && (this.resultsField == null || this.resultsField.equals(originalConfig.getResultsField()))
-            && super.isNoop();
-    }
-
     @Override
     public boolean isSupported(InferenceConfig config) {
         return config instanceof FillMaskConfig;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfig.java
index 2b043cf022a3d..8733e456157d2 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfig.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfig.java
@@ -6,10 +6,12 @@
  */
 package org.elasticsearch.xpack.core.ml.inference.trainedmodel;
 
+import org.elasticsearch.ElasticsearchStatusException;
 import org.elasticsearch.TransportVersion;
 import org.elasticsearch.common.io.stream.VersionedNamedWriteable;
 import org.elasticsearch.xcontent.ParseField;
 import org.elasticsearch.xpack.core.ml.MlConfigVersion;
+import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
 import org.elasticsearch.xpack.core.ml.utils.NamedXContentObject;
 
 public interface InferenceConfig extends NamedXContentObject, VersionedNamedWriteable {
@@ -20,6 +22,14 @@ public interface InferenceConfig extends NamedXContentObject, VersionedNamedWrit
 
     boolean isTargetTypeSupported(TargetType targetType);
 
+    /**
+     * Return a copy of this with the settings updated by the
+     * values in {@code update}.
+     * @param update The update to apply
+     * @return A new updated config
+     */
+    InferenceConfig apply(InferenceConfigUpdate update);
+
     @Override
     default TransportVersion getMinimalSupportedVersion() {
         return getMinimalSupportedTransportVersion();
@@ -54,4 +64,12 @@ default boolean supportsPipelineAggregation() {
     default boolean supportsSearchRescorer() {
         return false;
     }
+
+    default ElasticsearchStatusException incompatibleUpdateException(String updateName) {
+        throw ExceptionsHelper.badRequestException(
+            "Inference config of type [{}] can not be updated with a inference request of type [{}]",
+            getName(),
+            updateName
+        );
+    }
 }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfigUpdate.java
index 30ecac00a3b80..50d516378e92d 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfigUpdate.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfigUpdate.java
@@ -20,10 +20,16 @@ public interface InferenceConfigUpdate extends VersionedNamedWriteable {
         Arrays.asList(WarningInferenceResults.WARNING.getPreferredName(), TrainedModelConfig.MODEL_ID.getPreferredName())
     );
 
-    InferenceConfig apply(InferenceConfig originalConfig);
-
     boolean isSupported(InferenceConfig config);
 
+    /**
+     * Is this an empty update.
+     * @return True if empty
+     */
+    default boolean isEmpty() {
+        return false;
+    }
+
     String getResultsField();
 
     interface Builder<T extends Builder<T, U>, U extends InferenceConfigUpdate> {
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/MPNetTokenization.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/MPNetTokenization.java
index e18a1d056f57c..9e599eb86b8ad 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/MPNetTokenization.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/MPNetTokenization.java
@@ -59,6 +59,11 @@ public MPNetTokenization(StreamInput in) throws IOException {
         super(in);
     }
 
+    @Override
+    Tokenization buildWindowingTokenization(int updatedMaxSeqLength, int updatedSpan) {
+        return new MPNetTokenization(this.doLowerCase, this.withSpecialTokens, updatedMaxSeqLength, Truncate.NONE, updatedSpan);
+    }
+
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfig.java
index e7f3a66b6748f..b87e7e7edbb71 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfig.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfig.java
@@ -157,6 +157,23 @@ public boolean isTargetTypeSupported(TargetType targetType) {
         return false;
     }
 
+    @Override
+    public InferenceConfig apply(InferenceConfigUpdate update) {
+        if (update instanceof NerConfigUpdate configUpdate) {
+            return new NerConfig(
+                vocabularyConfig,
+                (configUpdate.getTokenizationUpdate() == null) ? tokenization : configUpdate.getTokenizationUpdate().apply(tokenization),
+                classificationLabels,
+                Optional.ofNullable(update.getResultsField()).orElse(resultsField)
+            );
+        } else if (update instanceof TokenizationConfigUpdate tokenizationUpdate) {
+            var updatedTokenization = getTokenization().updateSpanSettings(tokenizationUpdate.getSpanSettings());
+            return new NerConfig(this.vocabularyConfig, updatedTokenization, this.classificationLabels, this.resultsField);
+        } else {
+            throw incompatibleUpdateException(update.getName());
+        }
+    }
+
     @Override
     public MlConfigVersion getMinimalSupportedMlConfigVersion() {
         return MlConfigVersion.V_8_0_0;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdate.java
index 884ecb39df448..015aa658b1658 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdate.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdate.java
@@ -20,7 +20,6 @@
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Objects;
-import java.util.Optional;
 
 import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.RESULTS_FIELD;
 import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.TOKENIZATION;
@@ -92,28 +91,6 @@ public String getName() {
         return NAME;
     }
 
-    @Override
-    public InferenceConfig apply(InferenceConfig originalConfig) {
-        if (originalConfig instanceof NerConfig == false) {
-            throw ExceptionsHelper.badRequestException(
-                "Inference config of type [{}] can not be updated with a request of type [{}]",
-                originalConfig.getName(),
-                getName()
-            );
-        }
-        NerConfig nerConfig = (NerConfig) originalConfig;
-        if (isNoop(nerConfig)) {
-            return nerConfig;
-        }
-
-        return new NerConfig(
-            nerConfig.getVocabularyConfig(),
-            (tokenizationUpdate == null) ? nerConfig.getTokenization() : tokenizationUpdate.apply(nerConfig.getTokenization()),
-            nerConfig.getClassificationLabels(),
-            Optional.ofNullable(resultsField).orElse(nerConfig.getResultsField())
-        );
-    }
-
     boolean isNoop(NerConfig originalConfig) {
         return (this.resultsField == null || this.resultsField.equals(originalConfig.getResultsField())) && super.isNoop();
     }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NullInferenceConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NullInferenceConfig.java
index 67d0edb880a66..dae96dc9a684c 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NullInferenceConfig.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NullInferenceConfig.java
@@ -29,6 +29,11 @@ public boolean isTargetTypeSupported(TargetType targetType) {
         return true;
     }
 
+    @Override
+    public InferenceConfig apply(InferenceConfigUpdate update) {
+        throw new UnsupportedOperationException("Cannot update NullInferenceConfig objects");
+    }
+
     @Override
     public MlConfigVersion getMinimalSupportedMlConfigVersion() {
         return MlConfigVersion.CURRENT;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfig.java
index 74ca76779d4b2..0e27fc00b9b70 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfig.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfig.java
@@ -120,6 +120,22 @@ public boolean isTargetTypeSupported(TargetType targetType) {
         return false;
     }
 
+    @Override
+    public InferenceConfig apply(InferenceConfigUpdate update) {
+        if (update instanceof PassThroughConfigUpdate configUpdate) {
+            return new PassThroughConfig(
+                vocabularyConfig,
+                (configUpdate.getTokenizationUpdate() == null) ? tokenization : configUpdate.getTokenizationUpdate().apply(tokenization),
+                update.getResultsField() == null ? resultsField : update.getResultsField()
+            );
+        } else if (update instanceof TokenizationConfigUpdate tokenizationUpdate) {
+            var updatedTokenization = getTokenization().updateSpanSettings(tokenizationUpdate.getSpanSettings());
+            return new PassThroughConfig(this.vocabularyConfig, updatedTokenization, this.resultsField);
+        } else {
+            throw incompatibleUpdateException(update.getName());
+        }
+    }
+
     @Override
     public MlConfigVersion getMinimalSupportedMlConfigVersion() {
         return MlConfigVersion.V_8_0_0;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdate.java
index 874f82dc019ca..1a7832a70cfdf 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdate.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdate.java
@@ -96,30 +96,6 @@ public String getName() {
         return NAME;
     }
 
-    @Override
-    public InferenceConfig apply(InferenceConfig originalConfig) {
-        if ((resultsField == null || resultsField.equals(originalConfig.getResultsField())) && super.isNoop()) {
-            return originalConfig;
-        }
-
-        if (originalConfig instanceof PassThroughConfig == false) {
-            throw ExceptionsHelper.badRequestException(
-                "Inference config of type [{}] can not be updated with a inference request of type [{}]",
-                originalConfig.getName(),
-                getName()
-            );
-        }
-
-        PassThroughConfig passThroughConfig = (PassThroughConfig) originalConfig;
-        return new PassThroughConfig(
-            passThroughConfig.getVocabularyConfig(),
-            (tokenizationUpdate == null)
-                ? passThroughConfig.getTokenization()
-                : tokenizationUpdate.apply(passThroughConfig.getTokenization()),
-            resultsField == null ? originalConfig.getResultsField() : resultsField
-        );
-    }
-
     @Override
     public boolean isSupported(InferenceConfig config) {
         return config instanceof PassThroughConfig;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfig.java
index 7572d757f2b5f..014cdb1dd891f 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfig.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfig.java
@@ -188,6 +188,32 @@ public boolean isTargetTypeSupported(TargetType targetType) {
         return false;
     }
 
+    @Override
+    public InferenceConfig apply(InferenceConfigUpdate update) {
+        if (update instanceof QuestionAnsweringConfigUpdate configUpdate) {
+            return new QuestionAnsweringConfig(
+                configUpdate.getQuestion(),
+                Optional.ofNullable(configUpdate.getNumTopClasses()).orElse(numTopClasses),
+                Optional.ofNullable(configUpdate.getMaxAnswerLength()).orElse(maxAnswerLength),
+                vocabularyConfig,
+                configUpdate.tokenizationUpdate == null ? tokenization : configUpdate.tokenizationUpdate.apply(tokenization),
+                Optional.ofNullable(configUpdate.getResultsField()).orElse(resultsField)
+            );
+        } else if (update instanceof TokenizationConfigUpdate tokenizationUpdate) {
+            var updatedTokenization = getTokenization().updateSpanSettings(tokenizationUpdate.getSpanSettings());
+            return new QuestionAnsweringConfig(
+                question,
+                numTopClasses,
+                maxAnswerLength,
+                vocabularyConfig,
+                updatedTokenization,
+                resultsField
+            );
+        } else {
+            throw incompatibleUpdateException(update.getName());
+        }
+    }
+
     @Override
     public MlConfigVersion getMinimalSupportedMlConfigVersion() {
         return MlConfigVersion.V_8_3_0;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfigUpdate.java
index 40657544a14d5..df4cb565731ed 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfigUpdate.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfigUpdate.java
@@ -22,7 +22,6 @@
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Objects;
-import java.util.Optional;
 
 import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.NUM_TOP_CLASSES;
 import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.RESULTS_FIELD;
@@ -126,37 +125,6 @@ public String getWriteableName() {
         return NAME;
     }
 
-    @Override
-    public InferenceConfig apply(InferenceConfig originalConfig) {
-        if (originalConfig instanceof QuestionAnsweringConfig == false) {
-            throw ExceptionsHelper.badRequestException(
-                "Inference config of type [{}] can not be updated with a inference request of type [{}]",
-                originalConfig.getName(),
-                getName()
-            );
-        }
-
-        QuestionAnsweringConfig questionAnsweringConfig = (QuestionAnsweringConfig) originalConfig;
-        return new QuestionAnsweringConfig(
-            question,
-            Optional.ofNullable(numTopClasses).orElse(questionAnsweringConfig.getNumTopClasses()),
-            Optional.ofNullable(maxAnswerLength).orElse(questionAnsweringConfig.getMaxAnswerLength()),
-            questionAnsweringConfig.getVocabularyConfig(),
-            tokenizationUpdate == null
-                ? questionAnsweringConfig.getTokenization()
-                : tokenizationUpdate.apply(questionAnsweringConfig.getTokenization()),
-            Optional.ofNullable(resultsField).orElse(questionAnsweringConfig.getResultsField())
-        );
-    }
-
-    boolean isNoop(QuestionAnsweringConfig originalConfig) {
-        return (numTopClasses == null || numTopClasses.equals(originalConfig.getNumTopClasses()))
-            && (maxAnswerLength == null || maxAnswerLength.equals(originalConfig.getMaxAnswerLength()))
-            && (resultsField == null || resultsField.equals(originalConfig.getResultsField()))
-            && (question == null || question.equals(originalConfig.getQuestion()))
-            && super.isNoop();
-    }
-
     @Override
     public boolean isSupported(InferenceConfig config) {
         return config instanceof QuestionAnsweringConfig;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfig.java
index 8ea53b2725523..337a1ac693128 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfig.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfig.java
@@ -135,6 +135,24 @@ public boolean isTargetTypeSupported(TargetType targetType) {
         return TargetType.REGRESSION.equals(targetType);
     }
 
+    @Override
+    public InferenceConfig apply(InferenceConfigUpdate update) {
+        if (update instanceof RegressionConfigUpdate configUpdate) {
+            RegressionConfig.Builder builder = new RegressionConfig.Builder(this);
+            if (configUpdate.getResultsField() != null) {
+                builder.setResultsField(configUpdate.getResultsField());
+            }
+            if (configUpdate.getNumTopFeatureImportanceValues() != null) {
+                builder.setNumTopFeatureImportanceValues(configUpdate.getNumTopFeatureImportanceValues());
+            }
+            return builder.build();
+        } else if (update instanceof ResultsFieldUpdate resultsFieldUpdate) {
+            return new RegressionConfig.Builder(this).setResultsField(resultsFieldUpdate.getResultsField()).build();
+        } else {
+            throw incompatibleUpdateException(update.getName());
+        }
+    }
+
     @Override
     public MlConfigVersion getMinimalSupportedMlConfigVersion() {
         return requestingImportance() ? MlConfigVersion.V_7_7_0 : MIN_SUPPORTED_VERSION;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdate.java
index a678806181ef8..dc1a7bdeef104 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdate.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdate.java
@@ -144,41 +144,11 @@ public int hashCode() {
         return Objects.hash(resultsField, numTopFeatureImportanceValues);
     }
 
-    @Override
-    public InferenceConfig apply(InferenceConfig originalConfig) {
-        if (originalConfig instanceof RegressionConfig == false) {
-            throw ExceptionsHelper.badRequestException(
-                "Inference config of type [{}] can not be updated with a inference request of type [{}]",
-                originalConfig.getName(),
-                getName()
-            );
-        }
-
-        RegressionConfig regressionConfig = (RegressionConfig) originalConfig;
-        if (isNoop(regressionConfig)) {
-            return originalConfig;
-        }
-        RegressionConfig.Builder builder = new RegressionConfig.Builder(regressionConfig);
-        if (resultsField != null) {
-            builder.setResultsField(resultsField);
-        }
-        if (numTopFeatureImportanceValues != null) {
-            builder.setNumTopFeatureImportanceValues(numTopFeatureImportanceValues);
-        }
-        return builder.build();
-    }
-
     @Override
     public boolean isSupported(InferenceConfig inferenceConfig) {
         return inferenceConfig instanceof RegressionConfig;
     }
 
-    boolean isNoop(RegressionConfig originalConfig) {
-        return (resultsField == null || originalConfig.getResultsField().equals(resultsField))
-            && (numTopFeatureImportanceValues == null
-                || originalConfig.getNumTopFeatureImportanceValues() == numTopFeatureImportanceValues);
-    }
-
     public static class Builder implements InferenceConfigUpdate.Builder {
         private String resultsField;
         private Integer numTopFeatureImportanceValues;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdate.java
index fe1fb9844610d..34d3b1c1e38f5 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdate.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdate.java
@@ -11,7 +11,6 @@
 import org.elasticsearch.TransportVersions;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
 
 import java.io.IOException;
 import java.util.Objects;
@@ -34,22 +33,6 @@ public ResultsFieldUpdate(StreamInput in) throws IOException {
         resultsField = in.readString();
     }
 
-    @Override
-    public InferenceConfig apply(InferenceConfig originalConfig) {
-        if (originalConfig instanceof ClassificationConfig) {
-            ClassificationConfigUpdate update = new ClassificationConfigUpdate(null, resultsField, null, null, null);
-            return update.apply(originalConfig);
-        } else if (originalConfig instanceof RegressionConfig) {
-            RegressionConfigUpdate update = new RegressionConfigUpdate(resultsField, null);
-            return update.apply(originalConfig);
-        } else {
-            throw ExceptionsHelper.badRequestException(
-                "Inference config of unknown type [{}] can not be updated",
-                originalConfig.getName()
-            );
-        }
-    }
-
     @Override
     public boolean isSupported(InferenceConfig config) {
         return true;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RobertaTokenization.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RobertaTokenization.java
index febb7cb40ec82..bbb35ad70b90d 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RobertaTokenization.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RobertaTokenization.java
@@ -85,6 +85,18 @@ public RobertaTokenization(StreamInput in) throws IOException {
         this.addPrefixSpace = in.readBoolean();
     }
 
+    @Override
+    Tokenization buildWindowingTokenization(int updatedMaxSeqLength, int updatedSpan) {
+        return new RobertaTokenization(
+            this.doLowerCase,
+            this.withSpecialTokens,
+            updatedMaxSeqLength,
+            Truncate.NONE,
+            updatedSpan,
+            this.addPrefixSpace
+        );
+    }
+
     public boolean isAddPrefixSpace() {
         return addPrefixSpace;
     }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfig.java
index ab50f26636fc4..153879d4f61b4 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfig.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfig.java
@@ -133,6 +133,40 @@ public boolean isTargetTypeSupported(TargetType targetType) {
         return false;
     }
 
+    @Override
+    public InferenceConfig apply(InferenceConfigUpdate update) {
+        if (update instanceof TextClassificationConfigUpdate configUpdate) {
+            TextClassificationConfig.Builder builder = new TextClassificationConfig.Builder(this);
+            if (configUpdate.getNumTopClasses() != null) {
+                builder.setNumTopClasses(configUpdate.getNumTopClasses());
+            }
+            if (configUpdate.getClassificationLabels() != null) {
+                if (classificationLabels.size() != configUpdate.getClassificationLabels().size()) {
+                    throw ExceptionsHelper.badRequestException(
+                        "The number of [{}] the model is defined with [{}] does not match the number in the update [{}]",
+                        CLASSIFICATION_LABELS,
+                        classificationLabels.size(),
+                        configUpdate.getClassificationLabels().size()
+                    );
+                }
+                builder.setClassificationLabels(configUpdate.getClassificationLabels());
+            }
+            if (configUpdate.getResultsField() != null) {
+                builder.setResultsField(configUpdate.getResultsField());
+            }
+            if (configUpdate.tokenizationUpdate != null) {
+                builder.setTokenization(configUpdate.tokenizationUpdate.apply(tokenization));
+            }
+
+            return builder.build();
+        } else if (update instanceof TokenizationConfigUpdate tokenizationUpdate) {
+            var updatedTokenization = getTokenization().updateSpanSettings(tokenizationUpdate.getSpanSettings());
+            return new TextClassificationConfig.Builder(this).setTokenization(updatedTokenization).build();
+        } else {
+            throw incompatibleUpdateException(update.getName());
+        }
+    }
+
     @Override
     public MlConfigVersion getMinimalSupportedMlConfigVersion() {
         return MlConfigVersion.V_8_0_0;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdate.java
index 460a3a685d534..5379e3eeb17f7 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdate.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdate.java
@@ -111,54 +111,6 @@ public void writeTo(StreamOutput out) throws IOException {
         out.writeOptionalString(resultsField);
     }
 
-    @Override
-    public InferenceConfig apply(InferenceConfig originalConfig) {
-        if (originalConfig instanceof TextClassificationConfig == false) {
-            throw ExceptionsHelper.badRequestException(
-                "Inference config of type [{}] can not be updated with a request of type [{}]",
-                originalConfig.getName(),
-                getName()
-            );
-        }
-
-        TextClassificationConfig classificationConfig = (TextClassificationConfig) originalConfig;
-        if (isNoop(classificationConfig)) {
-            return originalConfig;
-        }
-
-        TextClassificationConfig.Builder builder = new TextClassificationConfig.Builder(classificationConfig);
-        if (numTopClasses != null) {
-            builder.setNumTopClasses(numTopClasses);
-        }
-        if (classificationLabels != null) {
-            if (classificationLabels.size() != classificationConfig.getClassificationLabels().size()) {
-                throw ExceptionsHelper.badRequestException(
-                    "The number of [{}] the model is defined with [{}] does not match the number in the update [{}]",
-                    CLASSIFICATION_LABELS,
-                    classificationConfig.getClassificationLabels().size(),
-                    classificationLabels.size()
-                );
-            }
-            builder.setClassificationLabels(classificationLabels);
-        }
-        if (resultsField != null) {
-            builder.setResultsField(resultsField);
-        }
-
-        if (tokenizationUpdate != null) {
-            builder.setTokenization(tokenizationUpdate.apply(classificationConfig.getTokenization()));
-        }
-
-        return builder.build();
-    }
-
-    boolean isNoop(TextClassificationConfig originalConfig) {
-        return (this.numTopClasses == null || this.numTopClasses == originalConfig.getNumTopClasses())
-            && (this.classificationLabels == null)
-            && (this.resultsField == null || this.resultsField.equals(originalConfig.getResultsField()))
-            && super.isNoop();
-    }
-
     @Override
     public boolean isSupported(InferenceConfig config) {
         return config instanceof TextClassificationConfig;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfig.java
index 518b9eb62d793..d043c17535636 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfig.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfig.java
@@ -145,6 +145,23 @@ public boolean isTargetTypeSupported(TargetType targetType) {
         return false;
     }
 
+    @Override
+    public InferenceConfig apply(InferenceConfigUpdate update) {
+        if (update instanceof TextEmbeddingConfigUpdate configUpdate) {
+            return new TextEmbeddingConfig(
+                vocabularyConfig,
+                configUpdate.tokenizationUpdate == null ? tokenization : configUpdate.tokenizationUpdate.apply(tokenization),
+                configUpdate.getResultsField() == null ? resultsField : configUpdate.getResultsField(),
+                embeddingSize
+            );
+        } else if (update instanceof TokenizationConfigUpdate tokenizationUpdate) {
+            var updatedTokenization = getTokenization().updateSpanSettings(tokenizationUpdate.getSpanSettings());
+            return new TextEmbeddingConfig(vocabularyConfig, updatedTokenization, resultsField, embeddingSize);
+        } else {
+            throw incompatibleUpdateException(update.getName());
+        }
+    }
+
     @Override
     public MlConfigVersion getMinimalSupportedMlConfigVersion() {
         return MlConfigVersion.V_8_0_0;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdate.java
index 6acd2d209a875..e89281a59f7d2 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdate.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdate.java
@@ -104,29 +104,6 @@ public TransportVersion getMinimalSupportedVersion() {
         return TransportVersions.V_8_0_0;
     }
 
-    @Override
-    public InferenceConfig apply(InferenceConfig originalConfig) {
-        if ((resultsField == null || resultsField.equals(originalConfig.getResultsField())) && super.isNoop()) {
-            return originalConfig;
-        }
-
-        if (originalConfig instanceof TextEmbeddingConfig == false) {
-            throw ExceptionsHelper.badRequestException(
-                "Inference config of type [{}] can not be updated with a inference request of type [{}]",
-                originalConfig.getName(),
-                getName()
-            );
-        }
-
-        TextEmbeddingConfig embeddingConfig = (TextEmbeddingConfig) originalConfig;
-        return new TextEmbeddingConfig(
-            embeddingConfig.getVocabularyConfig(),
-            tokenizationUpdate == null ? embeddingConfig.getTokenization() : tokenizationUpdate.apply(embeddingConfig.getTokenization()),
-            resultsField == null ? embeddingConfig.getResultsField() : resultsField,
-            embeddingConfig.getEmbeddingSize()
-        );
-    }
-
     @Override
     public boolean isSupported(InferenceConfig config) {
         return config instanceof TextEmbeddingConfig;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfig.java
index d8315bec14153..c4d78c9faf219 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfig.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfig.java
@@ -121,6 +121,22 @@ public boolean isTargetTypeSupported(TargetType targetType) {
         return false;
     }
 
+    @Override
+    public InferenceConfig apply(InferenceConfigUpdate update) {
+        if (update instanceof TextExpansionConfigUpdate configUpdate) {
+            return new TextExpansionConfig(
+                vocabularyConfig,
+                configUpdate.tokenizationUpdate == null ? tokenization : configUpdate.tokenizationUpdate.apply(tokenization),
+                Optional.ofNullable(configUpdate.getResultsField()).orElse(resultsField)
+            );
+        } else if (update instanceof TokenizationConfigUpdate tokenizationUpdate) {
+            var updatedTokenization = getTokenization().updateSpanSettings(tokenizationUpdate.getSpanSettings());
+            return new TextExpansionConfig(vocabularyConfig, updatedTokenization, resultsField);
+        } else {
+            throw incompatibleUpdateException(update.getName());
+        }
+    }
+
     @Override
     public boolean isAllocateOnly() {
         return true;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfigUpdate.java
index 181cadbaf7168..3ba5c91502480 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfigUpdate.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfigUpdate.java
@@ -21,7 +21,6 @@
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Objects;
-import java.util.Optional;
 
 import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.RESULTS_FIELD;
 import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.TOKENIZATION;
@@ -100,33 +99,6 @@ public String getName() {
         return NAME;
     }
 
-    @Override
-    public InferenceConfig apply(InferenceConfig originalConfig) {
-        if (originalConfig instanceof TextExpansionConfig == false) {
-            throw ExceptionsHelper.badRequestException(
-                "Inference config of type [{}] can not be updated with a request of type [{}]",
-                originalConfig.getName(),
-                getName()
-            );
-        }
-        TextExpansionConfig textExpansionConfig = (TextExpansionConfig) originalConfig;
-        if (isNoop(textExpansionConfig)) {
-            return textExpansionConfig;
-        }
-
-        return new TextExpansionConfig(
-            textExpansionConfig.getVocabularyConfig(),
-            (tokenizationUpdate == null)
-                ? textExpansionConfig.getTokenization()
-                : tokenizationUpdate.apply(textExpansionConfig.getTokenization()),
-            Optional.ofNullable(resultsField).orElse(textExpansionConfig.getResultsField())
-        );
-    }
-
-    boolean isNoop(TextExpansionConfig originalConfig) {
-        return (this.resultsField == null || this.resultsField.equals(originalConfig.getResultsField())) && super.isNoop();
-    }
-
     @Override
     public boolean isSupported(InferenceConfig config) {
         return config instanceof TextExpansionConfig;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfig.java
index 5511df03e6f36..bbd819891e217 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfig.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfig.java
@@ -149,6 +149,24 @@ public boolean isTargetTypeSupported(TargetType targetType) {
         return false;
     }
 
+    @Override
+    public InferenceConfig apply(InferenceConfigUpdate update) {
+        if (update instanceof TextSimilarityConfigUpdate configUpdate) {
+            return new TextSimilarityConfig(
+                configUpdate.getText(),
+                vocabularyConfig,
+                configUpdate.tokenizationUpdate == null ? tokenization : configUpdate.tokenizationUpdate.apply(tokenization),
+                Optional.ofNullable(configUpdate.getResultsField()).orElse(resultsField),
+                Optional.ofNullable(configUpdate.getSpanScoreFunction()).orElse(spanScoreFunction)
+            );
+        } else if (update instanceof TokenizationConfigUpdate tokenizationUpdate) {
+            var updatedTokenization = getTokenization().updateSpanSettings(tokenizationUpdate.getSpanSettings());
+            return new TextSimilarityConfig(text, vocabularyConfig, updatedTokenization, resultsField, spanScoreFunction);
+        } else {
+            throw incompatibleUpdateException(update.getName());
+        }
+    }
+
     @Override
     public MlConfigVersion getMinimalSupportedMlConfigVersion() {
         return MlConfigVersion.V_8_5_0;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfigUpdate.java
index c7afacc07b944..2ddbf8bd63f49 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfigUpdate.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfigUpdate.java
@@ -110,31 +110,13 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th
         return builder;
     }
 
-    @Override
-    public String getWriteableName() {
-        return NAME;
+    public TextSimilarityConfig.SpanScoreFunction getSpanScoreFunction() {
+        return spanScoreFunction;
     }
 
     @Override
-    public InferenceConfig apply(InferenceConfig originalConfig) {
-        if (originalConfig instanceof TextSimilarityConfig == false) {
-            throw ExceptionsHelper.badRequestException(
-                "Inference config of type [{}] can not be updated with a inference request of type [{}]",
-                originalConfig.getName(),
-                getName()
-            );
-        }
-
-        TextSimilarityConfig textSimilarityConfig = (TextSimilarityConfig) originalConfig;
-        return new TextSimilarityConfig(
-            text,
-            textSimilarityConfig.getVocabularyConfig(),
-            tokenizationUpdate == null
-                ? textSimilarityConfig.getTokenization()
-                : tokenizationUpdate.apply(textSimilarityConfig.getTokenization()),
-            Optional.ofNullable(resultsField).orElse(textSimilarityConfig.getResultsField()),
-            Optional.ofNullable(spanScoreFunction).orElse(textSimilarityConfig.getSpanScoreFunction())
-        );
+    public String getWriteableName() {
+        return NAME;
     }
 
     @Override
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/Tokenization.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/Tokenization.java
index ef437e0201510..4f301b48cdacc 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/Tokenization.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/Tokenization.java
@@ -7,11 +7,14 @@
 
 package org.elasticsearch.xpack.core.ml.inference.trainedmodel;
 
+import org.elasticsearch.ElasticsearchStatusException;
 import org.elasticsearch.TransportVersions;
 import org.elasticsearch.common.io.stream.NamedWriteable;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.core.Nullable;
+import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.xcontent.ConstructingObjectParser;
 import org.elasticsearch.xcontent.ParseField;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -49,6 +52,19 @@ public String toString() {
         }
     }
 
+    record SpanSettings(@Nullable Integer maxSequenceLength, int span) implements Writeable {
+
+        SpanSettings(StreamInput in) throws IOException {
+            this(in.readOptionalVInt(), in.readVInt());
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            out.writeOptionalVInt(maxSequenceLength);
+            out.writeVInt(span);
+        }
+    };
+
     // TODO add global params like never_split, bos_token, eos_token, mask_token, tokenize_chinese_chars, strip_accents, etc.
     public static final ParseField DO_LOWER_CASE = new ParseField("do_lower_case");
     public static final ParseField WITH_SPECIAL_TOKENS = new ParseField("with_special_tokens");
@@ -104,20 +120,8 @@ public static BertTokenization createDefault() {
                     + "] to indicate no windowing should occur"
             );
         }
-        if (this.span > this.maxSequenceLength) {
-            throw new IllegalArgumentException(
-                "["
-                    + SPAN.getPreferredName()
-                    + "] provided ["
-                    + this.span
-                    + "] must not be greater than ["
-                    + MAX_SEQUENCE_LENGTH.getPreferredName()
-                    + "] provided ["
-                    + this.maxSequenceLength
-                    + "]"
-            );
-        }
-        validateSpanAndTruncate(truncate, span);
+        validateSpanAndMaxSequenceLength(this.maxSequenceLength, this.span);
+        validateSpanAndTruncate(this.truncate, this.span);
     }
 
     public Tokenization(StreamInput in) throws IOException {
@@ -132,6 +136,35 @@ public Tokenization(StreamInput in) throws IOException {
         }
     }
 
+    /**
+     * Return a copy of this with the tokenizer span settings updated
+     * @param update The settings to update
+     * @return An updated Tokenization
+     */
+    public Tokenization updateSpanSettings(SpanSettings update) {
+        int maxLength = update.maxSequenceLength() == null ? this.maxSequenceLength : update.maxSequenceLength();
+        validateSpanAndMaxSequenceLength(maxLength, span);
+        if (update.maxSequenceLength() != null && update.maxSequenceLength() > this.maxSequenceLength) {
+            throw new ElasticsearchStatusException(
+                "Updated max sequence length [{}] cannot be greater " + "than the model's max sequence length [{}]",
+                RestStatus.BAD_REQUEST,
+                update.maxSequenceLength(),
+                this.maxSequenceLength
+            );
+        }
+
+        return buildWindowingTokenization(maxLength, update.span());
+    }
+
+    /**
+     * Build a copy of this with {@code Truncate == NONE} using
+     * the specified max sequence length and span
+     * @param updatedMaxSeqLength Max sequence length
+     * @param updatedSpan Span
+     * @return A new Tokenization object
+     */
+    abstract Tokenization buildWindowingTokenization(int updatedMaxSeqLength, int updatedSpan);
+
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeBoolean(doLowerCase);
@@ -160,6 +193,22 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
         return builder;
     }
 
+    public static void validateSpanAndMaxSequenceLength(int maxSequenceLength, int span) {
+        if (span > maxSequenceLength) {
+            throw new IllegalArgumentException(
+                "["
+                    + SPAN.getPreferredName()
+                    + "] provided ["
+                    + span
+                    + "] must not be greater than ["
+                    + MAX_SEQUENCE_LENGTH.getPreferredName()
+                    + "] provided ["
+                    + maxSequenceLength
+                    + "]"
+            );
+        }
+    }
+
     public static void validateSpanAndTruncate(@Nullable Truncate truncate, @Nullable Integer span) {
         if ((span != null && span != UNSET_SPAN_VALUE) && (truncate != null && truncate.isInCompatibleWithSpan())) {
             throw new IllegalArgumentException(
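In the Tokenization changes above, updateSpanSettings validates the requested span against the (possibly lowered) max sequence length, rejects any attempt to raise the max beyond the model's own limit, and then delegates to the subclass's buildWindowingTokenization, which always builds the copy with Truncate.NONE. A rough sketch with illustrative values (SpanSettings is package-private, so this only compiles inside the trainedmodel package):

    // Start from a tokenization that truncates at 512 tokens.
    Tokenization original = new BertTokenization(true, true, 512, Tokenization.Truncate.FIRST, -1);
    // Request a 128-token window advancing by a 32-token span; a span larger than the max length would throw.
    Tokenization windowed = original.updateSpanSettings(new Tokenization.SpanSettings(128, 32));
    // The returned copy windows the input instead of truncating it.
    assert windowed.getTruncate() == Tokenization.Truncate.NONE;
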
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TokenizationConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TokenizationConfigUpdate.java
new file mode 100644
index 0000000000000..2414fe5776438
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TokenizationConfigUpdate.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.core.ml.inference.trainedmodel;
+
+import org.elasticsearch.TransportVersion;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+
+import java.io.IOException;
+import java.util.Objects;
+
+/**
+ * An update that sets the tokenization truncate option to NONE
+ * and updates the span and max sequence length settings.
+ */
+public class TokenizationConfigUpdate implements InferenceConfigUpdate {
+
+    public static final String NAME = "tokenization_update";
+
+    private final Tokenization.SpanSettings spanSettings;
+
+    public TokenizationConfigUpdate(Tokenization.SpanSettings spanSettings) {
+        this.spanSettings = spanSettings;
+    }
+
+    public TokenizationConfigUpdate(StreamInput in) throws IOException {
+        this.spanSettings = new Tokenization.SpanSettings(in);
+    }
+
+    public Tokenization.SpanSettings getSpanSettings() {
+        return spanSettings;
+    }
+
+    @Override
+    public String getWriteableName() {
+        return NAME;
+    }
+
+    @Override
+    public TransportVersion getMinimalSupportedVersion() {
+        return null;
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        spanSettings.writeTo(out);
+    }
+
+    @Override
+    public boolean isSupported(InferenceConfig config) {
+        return true;
+    }
+
+    @Override
+    public String getResultsField() {
+        return null;
+    }
+
+    @Override
+    public Builder<? extends Builder<?, ?>, ? extends InferenceConfigUpdate> newBuilder() {
+        throw new UnsupportedOperationException("Tokenization update is not supported as a builder");
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+        TokenizationConfigUpdate that = (TokenizationConfigUpdate) o;
+        return Objects.equals(spanSettings, that.spanSettings);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(spanSettings);
+    }
+}
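The new TokenizationConfigUpdate is consumed through the instanceof branches added to each NLP config's apply method above, rather than through an apply method of its own. A hedged sketch of the dispatch, assuming originalConfig is any of the NLP configs touched by this patch (for example a TextEmbeddingConfig):

    // Wrap the new span settings in the update type.
    InferenceConfigUpdate update = new TokenizationConfigUpdate(new Tokenization.SpanSettings(128, 32));
    // The config rebuilds itself via Tokenization.updateSpanSettings(...) and keeps its other fields unchanged.
    InferenceConfig updated = originalConfig.apply(update);
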
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/XLMRobertaTokenization.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/XLMRobertaTokenization.java
index 43016e58420ad..648e52538040d 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/XLMRobertaTokenization.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/XLMRobertaTokenization.java
@@ -72,6 +72,11 @@ public XLMRobertaTokenization(StreamInput in) throws IOException {
         super(in);
     }
 
+    @Override
+    protected Tokenization buildWindowingTokenization(int maxSeqLength, int span) {
+        return new XLMRobertaTokenization(withSpecialTokens, maxSeqLength, Truncate.NONE, span);
+    }
+
     @Override
     public String getWriteableName() {
         return NAME;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfig.java
index ba4c130b987d2..4c669f289016a 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfig.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfig.java
@@ -198,6 +198,43 @@ public boolean isTargetTypeSupported(TargetType targetType) {
         return false;
     }
 
+    @Override
+    public InferenceConfig apply(InferenceConfigUpdate update) {
+        if (update instanceof ZeroShotClassificationConfigUpdate configUpdate) {
+            if ((configUpdate.getLabels() == null || configUpdate.getLabels().isEmpty())
+                && (this.labels == null || this.labels.isEmpty())) {
+                throw ExceptionsHelper.badRequestException(
+                    "stored configuration has no [{}] defined, supplied inference_config update must supply [{}]",
+                    LABELS.getPreferredName(),
+                    LABELS.getPreferredName()
+                );
+            }
+
+            return new ZeroShotClassificationConfig(
+                classificationLabels,
+                vocabularyConfig,
+                configUpdate.tokenizationUpdate == null ? tokenization : configUpdate.tokenizationUpdate.apply(tokenization),
+                hypothesisTemplate,
+                Optional.ofNullable(configUpdate.getMultiLabel()).orElse(isMultiLabel),
+                Optional.ofNullable(configUpdate.getLabels()).orElse(labels),
+                Optional.ofNullable(configUpdate.getResultsField()).orElse(resultsField)
+            );
+        } else if (update instanceof TokenizationConfigUpdate tokenizationUpdate) {
+            var updatedTokenization = getTokenization().updateSpanSettings(tokenizationUpdate.getSpanSettings());
+            return new ZeroShotClassificationConfig(
+                classificationLabels,
+                vocabularyConfig,
+                updatedTokenization,
+                hypothesisTemplate,
+                isMultiLabel,
+                labels,
+                resultsField
+            );
+        } else {
+            throw incompatibleUpdateException(update.getName());
+        }
+    }
+
     @Override
     public MlConfigVersion getMinimalSupportedMlConfigVersion() {
         return MlConfigVersion.V_8_0_0;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdate.java
index 47fd75ed6ff42..8f03d5e3d01cf 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdate.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdate.java
@@ -23,7 +23,6 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
-import java.util.Optional;
 
 import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.RESULTS_FIELD;
 import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.TOKENIZATION;
@@ -121,45 +120,6 @@ public String getWriteableName() {
         return NAME;
     }
 
-    @Override
-    public InferenceConfig apply(InferenceConfig originalConfig) {
-        if (originalConfig instanceof ZeroShotClassificationConfig == false) {
-            throw ExceptionsHelper.badRequestException(
-                "Inference config of type [{}] can not be updated with a inference request of type [{}]",
-                originalConfig.getName(),
-                getName()
-            );
-        }
-
-        ZeroShotClassificationConfig zeroShotConfig = (ZeroShotClassificationConfig) originalConfig;
-        if ((labels == null || labels.isEmpty()) && (zeroShotConfig.getLabels() == null || zeroShotConfig.getLabels().isEmpty())) {
-            throw ExceptionsHelper.badRequestException(
-                "stored configuration has no [{}] defined, supplied inference_config update must supply [{}]",
-                LABELS.getPreferredName(),
-                LABELS.getPreferredName()
-            );
-        }
-        if (isNoop(zeroShotConfig)) {
-            return originalConfig;
-        }
-        return new ZeroShotClassificationConfig(
-            zeroShotConfig.getClassificationLabels(),
-            zeroShotConfig.getVocabularyConfig(),
-            tokenizationUpdate == null ? zeroShotConfig.getTokenization() : tokenizationUpdate.apply(zeroShotConfig.getTokenization()),
-            zeroShotConfig.getHypothesisTemplate(),
-            Optional.ofNullable(isMultiLabel).orElse(zeroShotConfig.isMultiLabel()),
-            Optional.ofNullable(labels).orElse(zeroShotConfig.getLabels().orElse(null)),
-            Optional.ofNullable(resultsField).orElse(zeroShotConfig.getResultsField())
-        );
-    }
-
-    boolean isNoop(ZeroShotClassificationConfig originalConfig) {
-        return (labels == null || labels.equals(originalConfig.getLabels().orElse(null)))
-            && (isMultiLabel == null || isMultiLabel.equals(originalConfig.isMultiLabel()))
-            && (resultsField == null || resultsField.equals(originalConfig.getResultsField()))
-            && super.isNoop();
-    }
-
     @Override
     public boolean isSupported(InferenceConfig config) {
         return config instanceof ZeroShotClassificationConfig;
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertJapaneseTokenizationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertJapaneseTokenizationTests.java
index 79c069afbd4ab..9253469ecc49d 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertJapaneseTokenizationTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertJapaneseTokenizationTests.java
@@ -63,6 +63,13 @@ protected BertJapaneseTokenization mutateInstanceForVersion(BertJapaneseTokeniza
         return mutateForVersion(instance, version);
     }
 
+    public void testsBuildUpdatedTokenization() {
+        var update = new BertJapaneseTokenization(true, true, 100, Tokenization.Truncate.FIRST, -1).buildWindowingTokenization(50, 20);
+        assertEquals(Tokenization.Truncate.NONE, update.getTruncate());
+        assertEquals(50, update.maxSequenceLength());
+        assertEquals(20, update.getSpan());
+    }
+
     public static BertJapaneseTokenization createRandom() {
         return new BertJapaneseTokenization(
             randomBoolean() ? null : randomBoolean(),
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertTokenizationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertTokenizationTests.java
index a00ebec79a862..b9cda9a2068ea 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertTokenizationTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertTokenizationTests.java
@@ -63,6 +63,13 @@ protected BertTokenization mutateInstanceForVersion(BertTokenization instance, T
         return mutateForVersion(instance, version);
     }
 
+    public void testsBuildUpdatedTokenization() {
+        var update = new BertTokenization(true, true, 100, Tokenization.Truncate.FIRST, -1).buildWindowingTokenization(50, 20);
+        assertEquals(Tokenization.Truncate.NONE, update.getTruncate());
+        assertEquals(50, update.maxSequenceLength());
+        assertEquals(20, update.getSpan());
+    }
+
     public static BertTokenization createRandom() {
         return new BertTokenization(
             randomBoolean() ? null : randomBoolean(),
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdateTests.java
index 1d52deaafa719..620036a040368 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdateTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdateTests.java
@@ -59,11 +59,11 @@ public void testFromMapWithUnknownField() {
     public void testApply() {
         ClassificationConfig originalConfig = randomClassificationConfig();
 
-        assertThat(originalConfig, equalTo(ClassificationConfigUpdate.EMPTY_PARAMS.apply(originalConfig)));
+        assertThat(originalConfig, equalTo(originalConfig.apply(ClassificationConfigUpdate.EMPTY_PARAMS)));
 
         assertThat(
             new ClassificationConfig.Builder(originalConfig).setNumTopClasses(5).build(),
-            equalTo(new ClassificationConfigUpdate.Builder().setNumTopClasses(5).build().apply(originalConfig))
+            equalTo(originalConfig.apply(new ClassificationConfigUpdate.Builder().setNumTopClasses(5).build()))
         );
         assertThat(
             new ClassificationConfig.Builder().setNumTopClasses(5)
@@ -73,13 +73,14 @@ public void testApply() {
                 .setTopClassesResultsField("bar")
                 .build(),
             equalTo(
-                new ClassificationConfigUpdate.Builder().setNumTopClasses(5)
-                    .setNumTopFeatureImportanceValues(1)
-                    .setPredictionFieldType(PredictionFieldType.BOOLEAN)
-                    .setResultsField("foo")
-                    .setTopClassesResultsField("bar")
-                    .build()
-                    .apply(originalConfig)
+                originalConfig.apply(
+                    new ClassificationConfigUpdate.Builder().setNumTopClasses(5)
+                        .setNumTopFeatureImportanceValues(1)
+                        .setPredictionFieldType(PredictionFieldType.BOOLEAN)
+                        .setResultsField("foo")
+                        .setTopClassesResultsField("bar")
+                        .build()
+                )
             )
         );
     }
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfigUpdateTests.java
index 40eb9a4afd35f..385f5b1ddbf83 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfigUpdateTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfigUpdateTests.java
@@ -62,39 +62,19 @@ FillMaskConfigUpdate fromMap(Map<String, Object> map) {
         return FillMaskConfigUpdate.fromMap(map);
     }
 
-    public void testIsNoop() {
-        assertTrue(new FillMaskConfigUpdate.Builder().build().isNoop(FillMaskConfigTests.createRandom()));
-
-        assertFalse(
-            new FillMaskConfigUpdate.Builder().setResultsField("foo")
-                .build()
-                .isNoop(new FillMaskConfig.Builder().setResultsField("bar").build())
-        );
-
-        assertFalse(
-            new FillMaskConfigUpdate.Builder().setTokenizationUpdate(new BertTokenizationUpdate(Tokenization.Truncate.SECOND, null))
-                .build()
-                .isNoop(new FillMaskConfig.Builder().setResultsField("bar").build())
-        );
-
-        assertTrue(
-            new FillMaskConfigUpdate.Builder().setNumTopClasses(3).build().isNoop(new FillMaskConfig.Builder().setNumTopClasses(3).build())
-        );
-    }
-
     public void testApply() {
         FillMaskConfig originalConfig = FillMaskConfigTests.createRandom();
 
-        assertThat(originalConfig, equalTo(new FillMaskConfigUpdate.Builder().build().apply(originalConfig)));
+        assertThat(originalConfig, equalTo(originalConfig.apply(new FillMaskConfigUpdate.Builder().build())));
 
         assertThat(
             new FillMaskConfig.Builder(originalConfig).setResultsField("ml-results").build(),
-            equalTo(new FillMaskConfigUpdate.Builder().setResultsField("ml-results").build().apply(originalConfig))
+            equalTo(originalConfig.apply(new FillMaskConfigUpdate.Builder().setResultsField("ml-results").build()))
         );
         assertThat(
             new FillMaskConfig.Builder(originalConfig).setNumTopClasses(originalConfig.getNumTopClasses() + 1).build(),
             equalTo(
-                new FillMaskConfigUpdate.Builder().setNumTopClasses(originalConfig.getNumTopClasses() + 1).build().apply(originalConfig)
+                originalConfig.apply(new FillMaskConfigUpdate.Builder().setNumTopClasses(originalConfig.getNumTopClasses() + 1).build())
             )
         );
 
@@ -103,9 +83,11 @@ public void testApply() {
         assertThat(
             new FillMaskConfig.Builder(originalConfig).setTokenization(tokenization).build(),
             equalTo(
-                new FillMaskConfigUpdate.Builder().setTokenizationUpdate(
-                    createTokenizationUpdate(originalConfig.getTokenization(), truncate, null)
-                ).build().apply(originalConfig)
+                originalConfig.apply(
+                    new FillMaskConfigUpdate.Builder().setTokenizationUpdate(
+                        createTokenizationUpdate(originalConfig.getTokenization(), truncate, null)
+                    ).build()
+                )
             )
         );
     }
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdateTests.java
index eb2afa501a4cc..72ba9fa5ba540 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdateTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdateTests.java
@@ -20,7 +20,6 @@
 import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigTestScaffolding.cloneWithNewTruncation;
 import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigTestScaffolding.createTokenizationUpdate;
 import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.sameInstance;
 
 public class NerConfigUpdateTests extends AbstractNlpConfigUpdateTestCase<NerConfigUpdate> {
 
@@ -61,7 +60,7 @@ NerConfigUpdate fromMap(Map<String, Object> map) {
     public void testApply() {
         NerConfig originalConfig = NerConfigTests.createRandom();
 
-        assertThat(originalConfig, sameInstance(new NerConfigUpdate.Builder().build().apply(originalConfig)));
+        assertThat(originalConfig, equalTo(originalConfig.apply(new NerConfigUpdate.Builder().build())));
 
         assertThat(
             new NerConfig(
@@ -70,7 +69,7 @@ public void testApply() {
                 originalConfig.getClassificationLabels(),
                 "ml-results"
             ),
-            equalTo(new NerConfigUpdate.Builder().setResultsField("ml-results").build().apply(originalConfig))
+            equalTo(originalConfig.apply(new NerConfigUpdate.Builder().setResultsField("ml-results").build()))
         );
 
         Tokenization.Truncate truncate = randomFrom(Tokenization.Truncate.values());
@@ -83,9 +82,11 @@ public void testApply() {
                 originalConfig.getResultsField()
             ),
             equalTo(
-                new NerConfigUpdate.Builder().setTokenizationUpdate(
-                    createTokenizationUpdate(originalConfig.getTokenization(), truncate, null)
-                ).build().apply(originalConfig)
+                originalConfig.apply(
+                    new NerConfigUpdate.Builder().setTokenizationUpdate(
+                        createTokenizationUpdate(originalConfig.getTokenization(), truncate, null)
+                    ).build()
+                )
             )
         );
     }
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdateTests.java
index 9cbf73dfe4809..caec28a93e5a3 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdateTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdateTests.java
@@ -20,7 +20,6 @@
 import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigTestScaffolding.cloneWithNewTruncation;
 import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigTestScaffolding.createTokenizationUpdate;
 import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.sameInstance;
 
 public class PassThroughConfigUpdateTests extends AbstractNlpConfigUpdateTestCase<PassThroughConfigUpdate> {
 
@@ -61,11 +60,11 @@ PassThroughConfigUpdate fromMap(Map<String, Object> map) {
     public void testApply() {
         PassThroughConfig originalConfig = PassThroughConfigTests.createRandom();
 
-        assertThat(originalConfig, sameInstance(new PassThroughConfigUpdate.Builder().build().apply(originalConfig)));
+        assertEquals(originalConfig, originalConfig.apply(new PassThroughConfigUpdate.Builder().build()));
 
         assertThat(
             new PassThroughConfig(originalConfig.getVocabularyConfig(), originalConfig.getTokenization(), "ml-results"),
-            equalTo(new PassThroughConfigUpdate.Builder().setResultsField("ml-results").build().apply(originalConfig))
+            equalTo(originalConfig.apply(new PassThroughConfigUpdate.Builder().setResultsField("ml-results").build()))
         );
 
         Tokenization.Truncate truncate = randomFrom(Tokenization.Truncate.values());
@@ -73,9 +72,11 @@ public void testApply() {
         assertThat(
             new PassThroughConfig(originalConfig.getVocabularyConfig(), tokenization, originalConfig.getResultsField()),
             equalTo(
-                new PassThroughConfigUpdate.Builder().setTokenizationUpdate(
-                    createTokenizationUpdate(originalConfig.getTokenization(), truncate, null)
-                ).build().apply(originalConfig)
+                originalConfig.apply(
+                    new PassThroughConfigUpdate.Builder().setTokenizationUpdate(
+                        createTokenizationUpdate(originalConfig.getTokenization(), truncate, null)
+                    ).build()
+                )
             )
         );
     }
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfigUpdateTests.java
index 46f11e7c5f793..e787b770b5da5 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfigUpdateTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfigUpdateTests.java
@@ -122,11 +122,12 @@ public void testApply() {
                 originalConfig.getResultsField()
             ),
             equalTo(
-                new QuestionAnsweringConfigUpdate.Builder().setQuestion("Are you my mother?")
-                    .setNumTopClasses(4)
-                    .setMaxAnswerLength(40)
-                    .build()
-                    .apply(originalConfig)
+                originalConfig.apply(
+                    new QuestionAnsweringConfigUpdate.Builder().setQuestion("Are you my mother?")
+                        .setNumTopClasses(4)
+                        .setMaxAnswerLength(40)
+                        .build()
+                )
             )
         );
         assertThat(
@@ -139,10 +140,9 @@ public void testApply() {
                 "updated-field"
             ),
             equalTo(
-                new QuestionAnsweringConfigUpdate.Builder().setQuestion("Are you my mother?")
-                    .setResultsField("updated-field")
-                    .build()
-                    .apply(originalConfig)
+                originalConfig.apply(
+                    new QuestionAnsweringConfigUpdate.Builder().setQuestion("Are you my mother?").setResultsField("updated-field").build()
+                )
             )
         );
 
@@ -158,10 +158,11 @@ public void testApply() {
                 originalConfig.getResultsField()
             ),
             equalTo(
-                new QuestionAnsweringConfigUpdate.Builder().setQuestion("Are you my mother?")
-                    .setTokenizationUpdate(createTokenizationUpdate(originalConfig.getTokenization(), truncate, null))
-                    .build()
-                    .apply(originalConfig)
+                originalConfig.apply(
+                    new QuestionAnsweringConfigUpdate.Builder().setQuestion("Are you my mother?")
+                        .setTokenizationUpdate(createTokenizationUpdate(originalConfig.getTokenization(), truncate, null))
+                        .build()
+                )
             )
         );
     }
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdateTests.java
index 4c60ca7f885c4..35d2cb7fda16f 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdateTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdateTests.java
@@ -53,19 +53,18 @@ public void testFromMapWithUnknownField() {
     public void testApply() {
         RegressionConfig originalConfig = randomRegressionConfig();
 
-        assertThat(originalConfig, equalTo(RegressionConfigUpdate.EMPTY_PARAMS.apply(originalConfig)));
+        assertThat(originalConfig, equalTo(originalConfig.apply(RegressionConfigUpdate.EMPTY_PARAMS)));
 
         assertThat(
             new RegressionConfig.Builder(originalConfig).setNumTopFeatureImportanceValues(5).build(),
-            equalTo(new RegressionConfigUpdate.Builder().setNumTopFeatureImportanceValues(5).build().apply(originalConfig))
+            equalTo(originalConfig.apply(new RegressionConfigUpdate.Builder().setNumTopFeatureImportanceValues(5).build()))
         );
         assertThat(
             new RegressionConfig.Builder().setNumTopFeatureImportanceValues(1).setResultsField("foo").build(),
             equalTo(
-                new RegressionConfigUpdate.Builder().setNumTopFeatureImportanceValues(1)
-                    .setResultsField("foo")
-                    .build()
-                    .apply(originalConfig)
+                originalConfig.apply(
+                    new RegressionConfigUpdate.Builder().setNumTopFeatureImportanceValues(1).setResultsField("foo").build()
+                )
             )
         );
     }
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdateTests.java
index 4237458d01f63..9accabb788669 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdateTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdateTests.java
@@ -44,7 +44,7 @@ public void testApply_OnlyTheResultsFieldIsChanged() {
             ClassificationConfig config = ClassificationConfigTests.randomClassificationConfig();
             String newResultsField = config.getResultsField() + "foobar";
             ResultsFieldUpdate update = new ResultsFieldUpdate(newResultsField);
-            InferenceConfig applied = update.apply(config);
+            InferenceConfig applied = config.apply(update);
 
             assertThat(applied, instanceOf(ClassificationConfig.class));
             ClassificationConfig appliedConfig = (ClassificationConfig) applied;
@@ -55,7 +55,7 @@ public void testApply_OnlyTheResultsFieldIsChanged() {
             RegressionConfig config = RegressionConfigTests.randomRegressionConfig();
             String newResultsField = config.getResultsField() + "foobar";
             ResultsFieldUpdate update = new ResultsFieldUpdate(newResultsField);
-            InferenceConfig applied = update.apply(config);
+            InferenceConfig applied = config.apply(update);
 
             assertThat(applied, instanceOf(RegressionConfig.class));
             RegressionConfig appliedConfig = (RegressionConfig) applied;
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RobertaTokenizationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RobertaTokenizationTests.java
index 4f2c167015816..8cedd20432a6e 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RobertaTokenizationTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RobertaTokenizationTests.java
@@ -63,6 +63,13 @@ protected RobertaTokenization mutateInstanceForVersion(RobertaTokenization insta
         return mutateForVersion(instance, version);
     }
 
+    public void testsBuildUpdatedTokenization() {
+        var update = new RobertaTokenization(true, true, 100, Tokenization.Truncate.FIRST, -1).buildWindowingTokenization(50, 20);
+        assertEquals(Tokenization.Truncate.NONE, update.getTruncate());
+        assertEquals(50, update.maxSequenceLength());
+        assertEquals(20, update.getSpan());
+    }
+
     public static RobertaTokenization createRandom() {
         return new RobertaTokenization(
             randomBoolean() ? null : randomBoolean(),
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdateTests.java
index 72d963da8f0be..25b4299b41e8e 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdateTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdateTests.java
@@ -79,38 +79,6 @@ TextClassificationConfigUpdate fromMap(Map map) {
         return TextClassificationConfigUpdate.fromMap(map);
     }
 
-    public void testIsNoop() {
-        assertTrue(new TextClassificationConfigUpdate.Builder().build().isNoop(TextClassificationConfigTests.createRandom()));
-
-        assertFalse(
-            new TextClassificationConfigUpdate.Builder().setResultsField("foo")
-                .build()
-                .isNoop(
-                    new TextClassificationConfig.Builder().setClassificationLabels(List.of("a", "b"))
-                        .setNumTopClasses(-1)
-                        .setResultsField("bar")
-                        .build()
-                )
-        );
-
-        assertTrue(
-            new TextClassificationConfigUpdate.Builder().setNumTopClasses(3)
-                .build()
-                .isNoop(new TextClassificationConfig.Builder().setClassificationLabels(List.of("a", "b")).setNumTopClasses(3).build())
-        );
-        assertFalse(
-            new TextClassificationConfigUpdate.Builder().setClassificationLabels(List.of("a", "b"))
-                .build()
-                .isNoop(new TextClassificationConfig.Builder().setClassificationLabels(List.of("c", "d")).build())
-        );
-        assertFalse(
-            new TextClassificationConfigUpdate.Builder().setTokenizationUpdate(
-                new BertTokenizationUpdate(Tokenization.Truncate.SECOND, null)
-            ).build().isNoop(new TextClassificationConfig.Builder().setClassificationLabels(List.of("c", "d")).build())
-        );
-
-    }
-
     public void testApply() {
         TextClassificationConfig originalConfig = new TextClassificationConfig(
             VocabularyConfigTests.createRandom(),
@@ -120,24 +88,24 @@ public void testApply() {
             "foo-results"
         );
 
-        assertThat(originalConfig, equalTo(new TextClassificationConfigUpdate.Builder().build().apply(originalConfig)));
+        assertThat(originalConfig, equalTo(originalConfig.apply(new TextClassificationConfigUpdate.Builder().build())));
 
         assertThat(
             new TextClassificationConfig.Builder(originalConfig).setClassificationLabels(List.of("foo", "bar")).build(),
             equalTo(
-                new TextClassificationConfigUpdate.Builder().setClassificationLabels(List.of("foo", "bar")).build().apply(originalConfig)
+                originalConfig.apply(new TextClassificationConfigUpdate.Builder().setClassificationLabels(List.of("foo", "bar")).build())
             )
         );
         assertThat(
             new TextClassificationConfig.Builder(originalConfig).setResultsField("ml-results").build(),
-            equalTo(new TextClassificationConfigUpdate.Builder().setResultsField("ml-results").build().apply(originalConfig))
+            equalTo(originalConfig.apply(new TextClassificationConfigUpdate.Builder().setResultsField("ml-results").build()))
         );
         assertThat(
             new TextClassificationConfig.Builder(originalConfig).setNumTopClasses(originalConfig.getNumTopClasses() + 2).build(),
             equalTo(
-                new TextClassificationConfigUpdate.Builder().setNumTopClasses(originalConfig.getNumTopClasses() + 2)
-                    .build()
-                    .apply(originalConfig)
+                originalConfig.apply(
+                    new TextClassificationConfigUpdate.Builder().setNumTopClasses(originalConfig.getNumTopClasses() + 2).build()
+                )
             )
         );
 
@@ -146,9 +114,11 @@ public void testApply() {
         assertThat(
             new TextClassificationConfig.Builder(originalConfig).setTokenization(tokenization).build(),
             equalTo(
-                new TextClassificationConfigUpdate.Builder().setTokenizationUpdate(
-                    createTokenizationUpdate(originalConfig.getTokenization(), truncate, null)
-                ).build().apply(originalConfig)
+                originalConfig.apply(
+                    new TextClassificationConfigUpdate.Builder().setTokenizationUpdate(
+                        createTokenizationUpdate(originalConfig.getTokenization(), truncate, null)
+                    ).build()
+                )
             )
         );
     }
@@ -161,7 +131,7 @@ public void testApplyWithInvalidLabels() {
 
         var update = new TextClassificationConfigUpdate.Builder().setClassificationLabels(newLabels).build();
 
-        ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> update.apply(originalConfig));
+        ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> originalConfig.apply(update));
         assertThat(
             e.getMessage(),
             containsString(
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdateTests.java
index 06abb12bdb0a2..ecff9c1010c46 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdateTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdateTests.java
@@ -20,7 +20,6 @@
 import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigTestScaffolding.cloneWithNewTruncation;
 import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigTestScaffolding.createTokenizationUpdate;
 import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.sameInstance;
 
 public class TextEmbeddingConfigUpdateTests extends AbstractNlpConfigUpdateTestCase {
 
@@ -61,7 +60,7 @@ TextEmbeddingConfigUpdate fromMap(Map map) {
     public void testApply() {
         TextEmbeddingConfig originalConfig = TextEmbeddingConfigTests.createRandom();
 
-        assertThat(originalConfig, sameInstance(new TextEmbeddingConfigUpdate.Builder().build().apply(originalConfig)));
+        assertThat(originalConfig, equalTo(originalConfig.apply(new TextEmbeddingConfigUpdate.Builder().build())));
 
         assertThat(
             new TextEmbeddingConfig(
@@ -70,7 +69,7 @@ public void testApply() {
                 "ml-results",
                 originalConfig.getEmbeddingSize()
             ),
-            equalTo(new TextEmbeddingConfigUpdate.Builder().setResultsField("ml-results").build().apply(originalConfig))
+            equalTo(originalConfig.apply(new TextEmbeddingConfigUpdate.Builder().setResultsField("ml-results").build()))
         );
 
         Tokenization.Truncate truncate = randomFrom(Tokenization.Truncate.values());
@@ -83,9 +82,11 @@ public void testApply() {
                 originalConfig.getEmbeddingSize()
             ),
             equalTo(
-                new TextEmbeddingConfigUpdate.Builder().setTokenizationUpdate(
-                    createTokenizationUpdate(originalConfig.getTokenization(), truncate, null)
-                ).build().apply(originalConfig)
+                originalConfig.apply(
+                    new TextEmbeddingConfigUpdate.Builder().setTokenizationUpdate(
+                        createTokenizationUpdate(originalConfig.getTokenization(), truncate, null)
+                    ).build()
+                )
             )
         );
     }
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfigUpdateTests.java
index b8b5f50d15bae..e5061a743c672 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfigUpdateTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfigUpdateTests.java
@@ -127,7 +127,7 @@ public void testApply() {
                 originalConfig.getResultsField(),
                 originalConfig.getSpanScoreFunction()
             ),
-            equalTo(new TextSimilarityConfigUpdate.Builder().setText("Are you my mother?").build().apply(originalConfig))
+            equalTo(originalConfig.apply(new TextSimilarityConfigUpdate.Builder().setText("Are you my mother?").build()))
         );
         assertThat(
             new TextSimilarityConfig(
@@ -138,10 +138,9 @@ public void testApply() {
                 originalConfig.getSpanScoreFunction()
             ),
             equalTo(
-                new TextSimilarityConfigUpdate.Builder().setText("Are you my mother?")
-                    .setResultsField("updated-field")
-                    .build()
-                    .apply(originalConfig)
+                originalConfig.apply(
+                    new TextSimilarityConfigUpdate.Builder().setText("Are you my mother?").setResultsField("updated-field").build()
+                )
             )
         );
 
@@ -156,10 +155,11 @@ public void testApply() {
                 originalConfig.getSpanScoreFunction()
             ),
             equalTo(
-                new TextSimilarityConfigUpdate.Builder().setText("Are you my mother?")
-                    .setTokenizationUpdate(createTokenizationUpdate(originalConfig.getTokenization(), truncate, null))
-                    .build()
-                    .apply(originalConfig)
+                originalConfig.apply(
+                    new TextSimilarityConfigUpdate.Builder().setText("Are you my mother?")
+                        .setTokenizationUpdate(createTokenizationUpdate(originalConfig.getTokenization(), truncate, null))
+                        .build()
+                )
             )
         );
     }
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TokenizationConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TokenizationConfigUpdateTests.java
new file mode 100644
index 0000000000000..431dcf6c8c769
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TokenizationConfigUpdateTests.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.core.ml.inference.trainedmodel;
+
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.test.AbstractWireSerializingTestCase;
+
+import java.io.IOException;
+
+public class TokenizationConfigUpdateTests extends AbstractWireSerializingTestCase<TokenizationConfigUpdate> {
+    @Override
+    protected Writeable.Reader<TokenizationConfigUpdate> instanceReader() {
+        return TokenizationConfigUpdate::new;
+    }
+
+    @Override
+    protected TokenizationConfigUpdate createTestInstance() {
+        Integer maxSequenceLength = randomBoolean() ? null : randomIntBetween(32, 64);
+        int span = randomIntBetween(8, 16);
+        return new TokenizationConfigUpdate(new Tokenization.SpanSettings(maxSequenceLength, span));
+    }
+
+    @Override
+    protected TokenizationConfigUpdate mutateInstance(TokenizationConfigUpdate instance) throws IOException {
+        return null;
+    }
+}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdateTests.java
index 09c8eed048d96..ed034bb8518d9 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdateTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdateTests.java
@@ -108,7 +108,7 @@ public void testApply() {
             randomBoolean() ? null : randomAlphaOfLength(8)
         );
 
-        assertThat(originalConfig, equalTo(new ZeroShotClassificationConfigUpdate.Builder().build().apply(originalConfig)));
+        assertThat(originalConfig, equalTo(originalConfig.apply(new ZeroShotClassificationConfigUpdate.Builder().build())));
 
         assertThat(
             new ZeroShotClassificationConfig(
@@ -120,7 +120,7 @@ public void testApply() {
                 List.of("foo", "bar"),
                 originalConfig.getResultsField()
             ),
-            equalTo(new ZeroShotClassificationConfigUpdate.Builder().setLabels(List.of("foo", "bar")).build().apply(originalConfig))
+            equalTo(originalConfig.apply(new ZeroShotClassificationConfigUpdate.Builder().setLabels(List.of("foo", "bar")).build()))
         );
         assertThat(
             new ZeroShotClassificationConfig(
@@ -132,7 +132,7 @@ public void testApply() {
                 originalConfig.getLabels().orElse(null),
                 originalConfig.getResultsField()
             ),
-            equalTo(new ZeroShotClassificationConfigUpdate.Builder().setMultiLabel(true).build().apply(originalConfig))
+            equalTo(originalConfig.apply(new ZeroShotClassificationConfigUpdate.Builder().setMultiLabel(true).build()))
         );
         assertThat(
             new ZeroShotClassificationConfig(
@@ -144,7 +144,7 @@ public void testApply() {
                 originalConfig.getLabels().orElse(null),
                 "updated-field"
             ),
-            equalTo(new ZeroShotClassificationConfigUpdate.Builder().setResultsField("updated-field").build().apply(originalConfig))
+            equalTo(originalConfig.apply(new ZeroShotClassificationConfigUpdate.Builder().setResultsField("updated-field").build()))
         );
 
         Tokenization.Truncate truncate = randomFrom(Tokenization.Truncate.values());
@@ -160,9 +160,11 @@ public void testApply() {
                 originalConfig.getResultsField()
             ),
             equalTo(
-                new ZeroShotClassificationConfigUpdate.Builder().setTokenizationUpdate(
-                    createTokenizationUpdate(originalConfig.getTokenization(), truncate, null)
-                ).build().apply(originalConfig)
+                originalConfig.apply(
+                    new ZeroShotClassificationConfigUpdate.Builder().setTokenizationUpdate(
+                        createTokenizationUpdate(originalConfig.getTokenization(), truncate, null)
+                    ).build()
+                )
             )
         );
     }
@@ -178,41 +180,13 @@ public void testApplyWithEmptyLabelsInConfigAndUpdate() {
             null
         );
 
-        Exception ex = expectThrows(Exception.class, () -> new ZeroShotClassificationConfigUpdate.Builder().build().apply(originalConfig));
+        Exception ex = expectThrows(Exception.class, () -> originalConfig.apply(new ZeroShotClassificationConfigUpdate.Builder().build()));
         assertThat(
             ex.getMessage(),
             containsString("stored configuration has no [labels] defined, supplied inference_config update must supply [labels]")
         );
     }
 
-    public void testIsNoop() {
-        assertTrue(new ZeroShotClassificationConfigUpdate.Builder().build().isNoop(ZeroShotClassificationConfigTests.createRandom()));
-
-        var originalConfig = new ZeroShotClassificationConfig(
-            List.of("contradiction", "neutral", "entailment"),
-            randomBoolean() ? null : VocabularyConfigTests.createRandom(),
-            randomBoolean() ? null : BertTokenizationTests.createRandom(),
-            randomAlphaOfLength(10),
-            randomBoolean(),
-            null,
-            randomBoolean() ? null : randomAlphaOfLength(8)
-        );
-
-        var update = new ZeroShotClassificationConfigUpdate.Builder().setLabels(List.of("glad", "sad", "mad")).build();
-        assertFalse(update.isNoop(originalConfig));
-
-        originalConfig = new ZeroShotClassificationConfig(
-            List.of("contradiction", "neutral", "entailment"),
-            randomBoolean() ? null : VocabularyConfigTests.createRandom(),
-            randomBoolean() ? null : BertTokenizationTests.createRandom(),
-            randomAlphaOfLength(10),
-            randomBoolean(),
-            List.of("glad", "sad", "mad"),
-            randomBoolean() ? null : randomAlphaOfLength(8)
-        );
-        assertTrue(update.isNoop(originalConfig));
-    }
-
     public static ZeroShotClassificationConfigUpdate createRandom() {
         return randomUpdate();
     }
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/TrainedModelDeploymentTask.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/TrainedModelDeploymentTask.java
index cd7ed9e3eb55a..851dd8744d03e 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/TrainedModelDeploymentTask.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/TrainedModelDeploymentTask.java
@@ -171,16 +171,8 @@ public void infer(
             );
             return;
         }
-        trainedModelAssignmentNodeService.infer(
-            this,
-            update.apply(inferenceConfigHolder.get()),
-            input,
-            skipQueue,
-            timeout,
-            prefixType,
-            parentActionTask,
-            listener
-        );
+        var updatedConfig = update.isEmpty() ? inferenceConfigHolder.get() : inferenceConfigHolder.get().apply(update);
+        trainedModelAssignmentNodeService.infer(this, updatedConfig, input, skipQueue, timeout, prefixType, parentActionTask, listener);
     }
 
     public Optional modelStats() {
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/LocalModel.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/LocalModel.java
index ffd70849d8f1c..fe0bd18b2147a 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/LocalModel.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/LocalModel.java
@@ -175,7 +175,10 @@ public void infer(Map fields, InferenceConfigUpdate update, Acti
                 listener.onResponse(new WarningInferenceResults(Messages.getMessage(INFERENCE_WARNING_ALL_FIELDS_MISSING, modelId)));
                 return;
             }
-            InferenceResults inferenceResults = trainedModelDefinition.infer(flattenedFields, update.apply(inferenceConfig));
+            InferenceResults inferenceResults = trainedModelDefinition.infer(
+                flattenedFields,
+                update.isEmpty() ? inferenceConfig : inferenceConfig.apply(update)
+            );
             if (shouldPersistStats) {
                 persistStats(false);
             }
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessorTests.java
index ce9d7a9d3640b..d8f1a1fd7433d 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessorTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessorTests.java
@@ -47,7 +47,7 @@ public void testBuildRequest() throws IOException {
         ZeroShotClassificationProcessor processor = new ZeroShotClassificationProcessor(tokenizer, config);
 
         NlpTask.Request request = processor.getRequestBuilder(
-            (NlpConfig) new ZeroShotClassificationConfigUpdate.Builder().setLabels(List.of("new", "stuff")).build().apply(config)
+            (NlpConfig) config.apply(new ZeroShotClassificationConfigUpdate.Builder().setLabels(List.of("new", "stuff")).build())
         ).buildRequest(List.of("Elasticsearch fun"), "request1", Tokenization.Truncate.NONE, -1);
 
         Map<String, Object> jsonDocAsMap = XContentHelper.convertToMap(request.processInput(), true, XContentType.JSON).v2();

From 971cfb9317fc64ca04fb75d0e463419166f6ab84 Mon Sep 17 00:00:00 2001
From: Valeriy Khakhutskyy <1292899+valeriy42@users.noreply.github.com>
Date: Fri, 12 Jan 2024 11:00:19 +0100
Subject: [PATCH 63/75] [ML] Refactor assignment planner code (#104260)

This PR simplifies the code in a few places. In the other places that had a TODO comment, the possible simplification would have led to undesired consequences, so I removed those TODO comments referencing issue #101612.
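
One of the simplifications, sketched from the diff below (here m, node and builder stand for a Deployment, a Node and an AssignmentPlan.Builder): the memory needed by a deployment that already has allocations on a node is now computed inside accountMemory rather than by every caller.

    // before: callers computed the required memory for current allocations themselves
    long requiredMemory = m.estimateMemoryUsageBytes(m.currentAllocationsByNodeId().get(node.id()));
    builder.accountMemory(m, node, requiredMemory);

    // after: the plan builder derives it from the deployment's current allocations on that node
    builder.accountMemory(m, node);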

Closes #101612
---
 .../TrainedModelAssignmentRebalancer.java          | 10 +++-------
 .../planning/AbstractPreserveAllocations.java      |  3 +--
 .../assignment/planning/AssignmentPlan.java        | 14 +++++++-------
 .../planning/RandomizedAssignmentRounding.java     |  2 --
 .../planning/ZoneAwareAssignmentPlanner.java       | 12 +++---------
 5 files changed, 14 insertions(+), 27 deletions(-)

diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancer.java
index a1142796558f4..ef8af6af445fb 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancer.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancer.java
@@ -142,13 +142,9 @@ private static void copyAssignments(
             for (Map.Entry<AssignmentPlan.Node, Integer> assignment : nodeAssignments.entrySet()) {
                 AssignmentPlan.Node originalNode = originalNodeById.get(assignment.getKey().id());
                 dest.assignModelToNode(m, originalNode, assignment.getValue());
-                if (m.currentAllocationsByNodeId().containsKey(originalNode.id())) {
-                    // TODO (#101612) requiredMemory should be calculated by the AssignmentPlan.Builder
-                    // As the node has all its available memory we need to manually account memory of models with
-                    // current allocations.
-                    long requiredMemory = m.estimateMemoryUsageBytes(m.currentAllocationsByNodeId().get(originalNode.id()));
-                    dest.accountMemory(m, originalNode, requiredMemory);
-                }
+                // As the node has all its available memory we need to manually account memory of models with
+                // current allocations.
+                dest.accountMemory(m, originalNode);
             }
         }
     }
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AbstractPreserveAllocations.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AbstractPreserveAllocations.java
index 026b433a8c2d4..98988ffa11055 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AbstractPreserveAllocations.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AbstractPreserveAllocations.java
@@ -68,7 +68,7 @@ Deployment modifyModelPreservingPreviousAssignments(Deployment m) {
     AssignmentPlan mergePreservedAllocations(AssignmentPlan assignmentPlan) {
         // As the model/node objects the assignment plan are the modified ones,
         // they will not match the models/nodes members we have in this class.
-        // Therefore, we build a lookup table based on the ids so we can merge the plan
+        // Therefore, we build a lookup table based on the ids, so we can merge the plan
         // with its preserved allocations.
         final Map<Tuple<String, String>, Integer> plannedAssignmentsByModelNodeIdPair = new HashMap<>();
         for (Deployment m : assignmentPlan.models()) {
@@ -80,7 +80,6 @@ AssignmentPlan mergePreservedAllocations(AssignmentPlan assignmentPlan) {
 
         AssignmentPlan.Builder mergedPlanBuilder = AssignmentPlan.builder(nodes, deployments);
         for (Node n : nodes) {
-            // TODO (#101612) Should the first loop happen in the builder constructor?
             for (Deployment deploymentAllocationsToPreserve : deployments) {
 
                 // if the model m is already allocated on the node n and I want to preserve this allocation
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlan.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlan.java
index d9cb0f08a6cd0..123c728587604 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlan.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlan.java
@@ -401,8 +401,7 @@ public Builder assignModelToNode(Deployment deployment, Node node, int allocatio
             if (allocations <= 0) {
                 return this;
             }
-            if (/*isAlreadyAssigned(deployment, node) == false
-                &&*/ requiredMemory > remainingNodeMemory.get(node)) {
+            if (requiredMemory > remainingNodeMemory.get(node)) {
                 throw new IllegalArgumentException(
                     "not enough memory on node ["
                         + node.id()
@@ -448,13 +447,14 @@ private static int getCurrentAllocations(Deployment m, Node n) {
         }
 
         public void accountMemory(Deployment m, Node n) {
-            // TODO (#101612) remove or refactor unused method
-            long requiredMemory = getDeploymentMemoryRequirement(m, n, getCurrentAllocations(m, n));
-            accountMemory(m, n, requiredMemory);
+            if (m.currentAllocationsByNodeId().containsKey(n.id())) {
+                int allocations = m.currentAllocationsByNodeId().get(n.id());
+                long requiredMemory = m.estimateMemoryUsageBytes(allocations);
+                accountMemory(m, n, requiredMemory);
+            }
         }
 
-        public void accountMemory(Deployment m, Node n, long requiredMemory) {
-            // TODO (#101612) computation of required memory should be done internally
+        private void accountMemory(Deployment m, Node n, long requiredMemory) {
             remainingNodeMemory.computeIfPresent(n, (k, v) -> v - requiredMemory);
             if (remainingNodeMemory.containsKey(n) && remainingNodeMemory.get(n) < 0) {
                 throw new IllegalArgumentException("not enough memory on node [" + n.id() + "] to assign model [" + m.id() + "]");
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/RandomizedAssignmentRounding.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/RandomizedAssignmentRounding.java
index 8bdc99998a0c2..81696cd20d922 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/RandomizedAssignmentRounding.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/RandomizedAssignmentRounding.java
@@ -310,8 +310,6 @@ private void unassignOversizedModels(Node n) {
         private AssignmentPlan toPlan() {
             AssignmentPlan.Builder builder = AssignmentPlan.builder(nodes, deployments);
             for (Map.Entry<Tuple<Deployment, Node>, Integer> assignment : tryAssigningRemainingCores().entrySet()) {
-                // TODO (#101612) The model should be assigned to the node only when it is possible. This means, that canAssign should be
-                // integrated into the assignModelToNode.
                 if (builder.canAssign(assignment.getKey().v1(), assignment.getKey().v2(), assignment.getValue())) {
                     builder.assignModelToNode(assignment.getKey().v1(), assignment.getKey().v2(), assignment.getValue());
                 }
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/ZoneAwareAssignmentPlanner.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/ZoneAwareAssignmentPlanner.java
index 8c9499ca9e00c..9af2e4cd49b17 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/ZoneAwareAssignmentPlanner.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/ZoneAwareAssignmentPlanner.java
@@ -183,15 +183,9 @@ private AssignmentPlan swapOriginalModelsInPlan(
             for (Map.Entry<Node, Integer> assignment : nodeAssignments.entrySet()) {
                 Node originalNode = originalNodeById.get(assignment.getKey().id());
                 planBuilder.assignModelToNode(originalDeployment, originalNode, assignment.getValue());
-                if (originalDeployment.currentAllocationsByNodeId().containsKey(originalNode.id())) {
-                    // TODO (#101612) requiredMemory should be calculated by the AssignmentPlan.Builder
-                    // As the node has all its available memory we need to manually account memory of models with
-                    // current allocations.
-                    long requiredMemory = originalDeployment.estimateMemoryUsageBytes(
-                        originalDeployment.currentAllocationsByNodeId().get(originalNode.id())
-                    );
-                    planBuilder.accountMemory(m, originalNode, requiredMemory);
-                }
+                // As the node has all its available memory we need to manually account memory of models with
+                // current allocations.
+                planBuilder.accountMemory(originalDeployment, originalNode);
             }
         }
         return planBuilder.build();

From 2a84b62efcf9673a44068afd75290d3c6bed1f97 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= 
Date: Fri, 12 Jan 2024 11:07:17 +0100
Subject: [PATCH 64/75] Fix: remove wrong assertion from
 `ESRestTestFeatureService#clusterHasFeature` (#104299)

The check is currently broken: the feature check always falls back to historical features, and if the feature is not one of the historical ones, an exception (assert) is raised.
This is wrong: we could be testing for a non-historical feature that the cluster simply does not have.
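
A minimal sketch of the intended semantics (not the actual implementation; the field names come from the diff below, the method body is illustrative):

    public boolean clusterHasFeature(String featureId) {
        // a feature advertised in the cluster state wins outright
        if (clusterStateFeatures.contains(featureId)) {
            return true;
        }
        // otherwise fall back to the historical-features predicate, which now returns
        // false for unknown feature ids instead of tripping an assertion
        return historicalFeaturesPredicate.test(featureId);
    }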
---
 .../test/rest/ESRestTestCase.java             | 26 ++++++++++++++-----
 .../test/rest/ESRestTestFeatureService.java   | 26 ++++---------------
 2 files changed, 24 insertions(+), 28 deletions(-)

diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java
index 20cd1997fd70e..a2806663ff321 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java
@@ -59,6 +59,7 @@
 import org.elasticsearch.core.IOUtils;
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.core.PathUtils;
+import org.elasticsearch.core.SuppressForbidden;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.core.UpdateForV9;
 import org.elasticsearch.features.FeatureSpecification;
@@ -84,6 +85,7 @@
 import org.junit.Before;
 
 import java.io.BufferedReader;
+import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
@@ -333,19 +335,28 @@ public void initClient() throws IOException {
         assert nodesVersions != null;
     }
 
-    protected static TestFeatureService createTestFeatureService(
+    protected TestFeatureService createTestFeatureService(
         Map<String, Set<String>> clusterStateFeatures,
         Set<Version> semanticNodeVersions
     ) {
         // Historical features information is unavailable when using legacy test plugins
         boolean hasHistoricalFeaturesInformation = System.getProperty("tests.features.metadata.path") != null;
-        var providers = hasHistoricalFeaturesInformation
-            ? List.of(new RestTestLegacyFeatures(), new ESRestTestCaseHistoricalFeatures())
-            : List.of(new RestTestLegacyFeatures());
+
+        final List featureSpecifications;
+        if (hasHistoricalFeaturesInformation) {
+            featureSpecifications = List.of(new RestTestLegacyFeatures(), new ESRestTestCaseHistoricalFeatures());
+        } else {
+            logger.warn(
+                "This test is running on the legacy test framework; historical features from production code will not be available. "
+                    + "You need to port the test to the new test plugins in order to use historical features from production code. "
+                    + "If this is a legacy feature used only in tests, you can add it to a test-only FeatureSpecification such as {}.",
+                RestTestLegacyFeatures.class.getCanonicalName()
+            );
+            featureSpecifications = List.of(new RestTestLegacyFeatures());
+        }
 
         return new ESRestTestFeatureService(
-            hasHistoricalFeaturesInformation,
-            providers,
+            featureSpecifications,
             semanticNodeVersions,
             ClusterFeatures.calculateAllNodeFeatures(clusterStateFeatures.values())
         );
@@ -2343,6 +2354,7 @@ private static class ESRestTestCaseHistoricalFeatures implements FeatureSpecific
         private static Map historicalFeatures;
 
         @Override
+        @SuppressForbidden(reason = "File#pathSeparator has not equivalent in java.nio.file")
         public Map getHistoricalFeatures() {
             if (historicalFeatures == null) {
                 Map historicalFeaturesMap = new HashMap<>();
@@ -2353,7 +2365,7 @@ public Map getHistoricalFeatures() {
                     );
                 }
 
-                String[] metadataFiles = metadataPath.split(System.getProperty("path.separator"));
+                String[] metadataFiles = metadataPath.split(File.pathSeparator);
                 for (String metadataFile : metadataFiles) {
                     try (
                         InputStream in = Files.newInputStream(PathUtils.get(metadataFile));
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java
index 5bb22058e4688..a73c43f4fc46a 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java
@@ -9,11 +9,11 @@
 package org.elasticsearch.test.rest;
 
 import org.elasticsearch.Version;
-import org.elasticsearch.core.Strings;
 import org.elasticsearch.features.FeatureData;
 import org.elasticsearch.features.FeatureSpecification;
 
 import java.util.Collection;
+import java.util.Comparator;
 import java.util.List;
 import java.util.NavigableMap;
 import java.util.Set;
@@ -24,33 +24,17 @@ class ESRestTestFeatureService implements TestFeatureService {
     private final Set clusterStateFeatures;
 
     ESRestTestFeatureService(
-        boolean hasHistoricalFeaturesInformation,
         List specs,
         Collection nodeVersions,
         Set clusterStateFeatures
     ) {
-        var minNodeVersion = nodeVersions.stream().min(Version::compareTo);
+        var minNodeVersion = nodeVersions.stream().min(Comparator.naturalOrder());
         var featureData = FeatureData.createFromSpecifications(specs);
         var historicalFeatures = featureData.getHistoricalFeatures();
-        var allHistoricalFeatures = historicalFeatures.lastEntry() == null ? Set.of() : historicalFeatures.lastEntry().getValue();
 
-        var errorMessage = Strings.format(
-            hasHistoricalFeaturesInformation
-                ? "Check the feature has been added to the correct FeatureSpecification in the relevant module or, if this is a "
-                    + "legacy feature used only in tests, to a test-only FeatureSpecification such as %s."
-                : "This test is running on the legacy test framework; historical features from production code will not be available. "
-                    + "You need to port the test to the new test plugins in order to use historical features from production code. "
-                    + "If this is a legacy feature used only in tests, you can add it to a test-only FeatureSpecification such as %s.",
-            RestTestLegacyFeatures.class.getCanonicalName()
-        );
-        this.historicalFeaturesPredicate = minNodeVersion.<Predicate<String>>map(v -> featureId -> {
-            assert allHistoricalFeatures.contains(featureId) : Strings.format("Unknown historical feature %s: %s", featureId, errorMessage);
-            return hasHistoricalFeature(historicalFeatures, v, featureId);
-        }).orElse(featureId -> {
-            // We can safely assume that new non-semantic versions (serverless) support all historical features
-            assert allHistoricalFeatures.contains(featureId) : Strings.format("Unknown historical feature %s: %s", featureId, errorMessage);
-            return true;
-        });
+        this.historicalFeaturesPredicate = minNodeVersion.<Predicate<String>>map(
+            v -> featureId -> hasHistoricalFeature(historicalFeatures, v, featureId)
+        ).orElse(featureId -> true); // We can safely assume that new non-semantic versions (serverless) support all historical features
         this.clusterStateFeatures = clusterStateFeatures;
     }
 

From 6187e905564b8a7b1cc346759bdaa09bfa913f9f Mon Sep 17 00:00:00 2001
From: Armin Braun 
Date: Fri, 12 Jan 2024 11:41:59 +0100
Subject: [PATCH 65/75] Replace some ActionListener.wrap with more efficient
 delegateFailureAndWrap in ml module (#103891)

There are loads of these spots in the ml codebase. We can save some code
and memory, and improve readability, by moving to the new
delegateFailureAndWrap.
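
Roughly, the pattern applied throughout (SomeAction, request, convert and listener below are placeholders, not APIs from a specific file):

    // before: a fresh wrapper that repeats the failure plumbing by hand
    client.execute(SomeAction.INSTANCE, request,
        ActionListener.wrap(r -> listener.onResponse(convert(r)), listener::onFailure));

    // after: failures are delegated to the outer listener; only the success path is wrapped
    client.execute(SomeAction.INSTANCE, request,
        listener.delegateFailureAndWrap((l, r) -> l.onResponse(convert(r))));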
---
 .../xpack/ml/MachineLearning.java             | 169 ++++++++----------
 .../TransportDeleteCalendarEventAction.java   |  90 +++++-----
 .../action/TransportDeleteDatafeedAction.java |  22 ++-
 .../TransportDeleteExpiredDataAction.java     |   9 +-
 .../action/TransportDeleteFilterAction.java   |   8 +-
 .../ml/action/TransportDeleteJobAction.java   |  27 ++-
 .../TransportDeleteTrainedModelAction.java    |  23 +--
 .../TransportEvaluateDataFrameAction.java     |  12 +-
 ...nsportExplainDataFrameAnalyticsAction.java |  35 ++--
 .../TransportFinalizeJobExecutionAction.java  |   8 +-
 .../ml/action/TransportFlushJobAction.java    |  17 +-
 .../ml/action/TransportGetBucketsAction.java  |  12 +-
 .../TransportGetCalendarEventsAction.java     |  26 +--
 .../TransportGetDataFrameAnalyticsAction.java |   2 +-
 .../ml/action/TransportGetFiltersAction.java  |   2 +-
 .../action/TransportGetJobsStatsAction.java   |   9 +-
 .../TransportGetMlAutoscalingStats.java       |   2 +-
 .../TransportGetModelSnapshotsAction.java     |   2 +-
 .../TransportGetOverallBucketsAction.java     |  40 +++--
 ...nsportPreviewDataFrameAnalyticsAction.java |  10 +-
 .../TransportPreviewDatafeedAction.java       |  24 ++-
 .../TransportPutDataFrameAnalyticsAction.java |  36 ++--
 .../TransportPutTrainedModelAction.java       |  32 +---
 .../extractor/DataFrameDataExtractor.java     |  15 +-
 24 files changed, 274 insertions(+), 358 deletions(-)

diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java
index 61835c4838110..09cb8644dba4f 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java
@@ -1963,7 +1963,7 @@ public void prepareForIndicesMigration(ClusterService clusterService, Client cli
         originClient.execute(
             SetUpgradeModeAction.INSTANCE,
             new SetUpgradeModeAction.Request(true),
-            ActionListener.wrap(r -> listener.onResponse(Collections.singletonMap("already_in_upgrade_mode", false)), listener::onFailure)
+            listener.delegateFailureAndWrap((l, r) -> l.onResponse(Collections.singletonMap("already_in_upgrade_mode", false)))
         );
     }
 
@@ -1985,7 +1985,7 @@ public void indicesMigrationComplete(
         originClient.execute(
             SetUpgradeModeAction.INSTANCE,
             new SetUpgradeModeAction.Request(false),
-            ActionListener.wrap(r -> listener.onResponse(r.isAcknowledged()), listener::onFailure)
+            listener.delegateFailureAndWrap((l, r) -> l.onResponse(r.isAcknowledged()))
         );
     }
 
@@ -2086,40 +2086,39 @@ public void cleanUpFeature(
             }
         );
 
-        ActionListener afterWaitingForTasks = ActionListener.wrap(listTasksResponse -> {
-            listTasksResponse.rethrowFailures("Waiting for indexing requests for .ml-* indices");
-            if (results.values().stream().allMatch(b -> b)) {
-                if (memoryTracker.get() != null) {
-                    memoryTracker.get()
-                        .awaitAndClear(
-                            ActionListener.wrap(
-                                cacheCleared -> SystemIndexPlugin.super.cleanUpFeature(clusterService, client, unsetResetModeListener),
-                                clearFailed -> {
-                                    logger.error(
-                                        "failed to clear memory tracker cache via machine learning reset feature API",
-                                        clearFailed
-                                    );
-                                    SystemIndexPlugin.super.cleanUpFeature(clusterService, client, unsetResetModeListener);
-                                }
-                            )
-                        );
-                    return;
+        // Stop all model deployments
+        ActionListener pipelineValidation = unsetResetModeListener.delegateFailureAndWrap(
+            (delegate, listTasksResponse) -> {
+                listTasksResponse.rethrowFailures("Waiting for indexing requests for .ml-* indices");
+                if (results.values().stream().allMatch(b -> b)) {
+                    if (memoryTracker.get() != null) {
+                        memoryTracker.get()
+                            .awaitAndClear(
+                                ActionListener.wrap(
+                                    cacheCleared -> SystemIndexPlugin.super.cleanUpFeature(clusterService, client, delegate),
+                                    clearFailed -> {
+                                        logger.error(
+                                            "failed to clear memory tracker cache via machine learning reset feature API",
+                                            clearFailed
+                                        );
+                                        SystemIndexPlugin.super.cleanUpFeature(clusterService, client, delegate);
+                                    }
+                                )
+                            );
+                        return;
+                    }
+                    // Call into the original listener to clean up the indices and then clear ml memory cache
+                    SystemIndexPlugin.super.cleanUpFeature(clusterService, client, delegate);
+                } else {
+                    final List<String> failedComponents = results.entrySet()
+                        .stream()
+                        .filter(result -> result.getValue() == false)
+                        .map(Map.Entry::getKey)
+                        .toList();
+                    delegate.onFailure(new RuntimeException("Some machine learning components failed to reset: " + failedComponents));
                 }
-                // Call into the original listener to clean up the indices and then clear ml memory cache
-                SystemIndexPlugin.super.cleanUpFeature(clusterService, client, unsetResetModeListener);
-            } else {
-                final List<String> failedComponents = results.entrySet()
-                    .stream()
-                    .filter(result -> result.getValue() == false)
-                    .map(Map.Entry::getKey)
-                    .toList();
-                unsetResetModeListener.onFailure(
-                    new RuntimeException("Some machine learning components failed to reset: " + failedComponents)
-                );
             }
-        }, unsetResetModeListener::onFailure);
-
-        ActionListener afterDataframesStopped = ActionListener.wrap(dataFrameStopResponse -> {
+        ).delegateFailureAndWrap((delegate, dataFrameStopResponse) -> {
             // Handle the response
             results.put("data_frame/analytics", dataFrameStopResponse.isStopped());
             if (results.values().stream().allMatch(b -> b)) {
@@ -2129,7 +2128,7 @@ public void cleanUpFeature(
                     // This waits for all xpack actions including: allocations, anomaly detections, analytics
                     .setActions("xpack/ml/*")
                     .setWaitForCompletion(true)
-                    .execute(ActionListener.wrap(listMlTasks -> {
+                    .execute(delegate.delegateFailureAndWrap((l, listMlTasks) -> {
                         listMlTasks.rethrowFailures("Waiting for machine learning tasks");
                         client.admin()
                             .cluster()
@@ -2138,48 +2137,37 @@ public void cleanUpFeature(
                             .setDetailed(true)
                             .setWaitForCompletion(true)
                             .setDescriptions("*.ml-*")
-                            .execute(afterWaitingForTasks);
-                    }, unsetResetModeListener::onFailure));
+                            .execute(l);
+                    }));
             } else {
                 final List<String> failedComponents = results.entrySet()
                     .stream()
                     .filter(result -> result.getValue() == false)
                     .map(Map.Entry::getKey)
                     .toList();
-                unsetResetModeListener.onFailure(
-                    new RuntimeException("Some machine learning components failed to reset: " + failedComponents)
-                );
+                delegate.onFailure(new RuntimeException("Some machine learning components failed to reset: " + failedComponents));
             }
-        }, unsetResetModeListener::onFailure);
-
-        ActionListener afterAnomalyDetectionClosed = ActionListener.wrap(closeJobResponse -> {
+        }).delegateFailureAndWrap((delegate, closeJobResponse) -> {
             // Handle the response
             results.put("anomaly_detectors", closeJobResponse.isClosed());
             if (machineLearningExtension.get().isDataFrameAnalyticsEnabled() == false) {
-                afterDataframesStopped.onResponse(new StopDataFrameAnalyticsAction.Response(true));
+                delegate.onResponse(new StopDataFrameAnalyticsAction.Response(true));
                 return;
             }
             // Stop data frame analytics
             StopDataFrameAnalyticsAction.Request stopDataFramesReq = new StopDataFrameAnalyticsAction.Request("_all").setAllowNoMatch(true);
-            client.execute(
-                StopDataFrameAnalyticsAction.INSTANCE,
-                stopDataFramesReq,
-                ActionListener.wrap(afterDataframesStopped::onResponse, failure -> {
-                    logger.warn(
-                        "failed stopping data frame analytics jobs for machine learning feature reset. Attempting with force=true",
-                        failure
-                    );
-                    client.execute(StopDataFrameAnalyticsAction.INSTANCE, stopDataFramesReq.setForce(true), afterDataframesStopped);
-                })
-            );
-        }, unsetResetModeListener::onFailure);
-
-        // Close anomaly detection jobs
-        ActionListener afterDataFeedsStopped = ActionListener.wrap(datafeedResponse -> {
+            client.execute(StopDataFrameAnalyticsAction.INSTANCE, stopDataFramesReq, ActionListener.wrap(delegate::onResponse, failure -> {
+                logger.warn(
+                    "failed stopping data frame analytics jobs for machine learning feature reset. Attempting with force=true",
+                    failure
+                );
+                client.execute(StopDataFrameAnalyticsAction.INSTANCE, stopDataFramesReq.setForce(true), delegate);
+            }));
+        }).delegateFailureAndWrap((delegate, datafeedResponse) -> {
             // Handle the response
             results.put("datafeeds", datafeedResponse.isStopped());
             if (machineLearningExtension.get().isAnomalyDetectionEnabled() == false) {
-                afterAnomalyDetectionClosed.onResponse(new CloseJobAction.Response(true));
+                delegate.onResponse(new CloseJobAction.Response(true));
                 return;
             }
             CloseJobAction.Request closeJobsRequest = new CloseJobAction.Request().setAllowNoMatch(true).setJobId("_all");
@@ -2187,65 +2175,48 @@ public void cleanUpFeature(
             client.execute(
                 KillProcessAction.INSTANCE,
                 new KillProcessAction.Request("*"),
-                ActionListener.wrap(
+                delegate.delegateFailureAndWrap(
                     // If successful, close and wait for jobs
-                    success -> client.execute(
+                    (l, success) -> client.execute(
                         CloseJobAction.INSTANCE,
                         closeJobsRequest,
-                        ActionListener.wrap(afterAnomalyDetectionClosed::onResponse, failure -> {
+                        ActionListener.wrap(l::onResponse, failure -> {
                             logger.warn(
                                 "failed closing anomaly jobs for machine learning feature reset. Attempting with force=true",
                                 failure
                             );
-                            client.execute(CloseJobAction.INSTANCE, closeJobsRequest.setForce(true), afterAnomalyDetectionClosed);
+                            client.execute(CloseJobAction.INSTANCE, closeJobsRequest.setForce(true), l);
                         })
-                    ),
-                    unsetResetModeListener::onFailure
+                    )
                 )
             );
-        }, unsetResetModeListener::onFailure);
-
-        // Stop data feeds
-        ActionListener cancelSnapshotUpgradesListener = ActionListener.wrap(
-            cancelUpgradesResponse -> {
-                if (machineLearningExtension.get().isAnomalyDetectionEnabled() == false) {
-                    afterDataFeedsStopped.onResponse(new StopDatafeedAction.Response(true));
-                    return;
-                }
-                StopDatafeedAction.Request stopDatafeedsReq = new StopDatafeedAction.Request("_all").setAllowNoMatch(true);
-                client.execute(
-                    StopDatafeedAction.INSTANCE,
-                    stopDatafeedsReq,
-                    ActionListener.wrap(afterDataFeedsStopped::onResponse, failure -> {
-                        logger.warn("failed stopping datafeeds for machine learning feature reset. Attempting with force=true", failure);
-                        client.execute(StopDatafeedAction.INSTANCE, stopDatafeedsReq.setForce(true), afterDataFeedsStopped);
-                    })
-                );
-            },
-            unsetResetModeListener::onFailure
-        );
-
-        // Cancel model snapshot upgrades
-        ActionListener stopDeploymentsListener = ActionListener.wrap(acknowledgedResponse -> {
+        }).delegateFailureAndWrap((delegate, cancelUpgradesResponse) -> {
+            if (machineLearningExtension.get().isAnomalyDetectionEnabled() == false) {
+                delegate.onResponse(new StopDatafeedAction.Response(true));
+                return;
+            }
+            StopDatafeedAction.Request stopDatafeedsReq = new StopDatafeedAction.Request("_all").setAllowNoMatch(true);
+            client.execute(StopDatafeedAction.INSTANCE, stopDatafeedsReq, ActionListener.wrap(delegate::onResponse, failure -> {
+                logger.warn("failed stopping datafeeds for machine learning feature reset. Attempting with force=true", failure);
+                client.execute(StopDatafeedAction.INSTANCE, stopDatafeedsReq.setForce(true), delegate);
+            }));
+        }).delegateFailureAndWrap((delegate, acknowledgedResponse) -> {
             if (machineLearningExtension.get().isAnomalyDetectionEnabled() == false) {
-                cancelSnapshotUpgradesListener.onResponse(new CancelJobModelSnapshotUpgradeAction.Response(true));
+                delegate.onResponse(new CancelJobModelSnapshotUpgradeAction.Response(true));
                 return;
             }
             CancelJobModelSnapshotUpgradeAction.Request cancelSnapshotUpgradesReq = new CancelJobModelSnapshotUpgradeAction.Request(
                 "_all",
                 "_all"
             );
-            client.execute(CancelJobModelSnapshotUpgradeAction.INSTANCE, cancelSnapshotUpgradesReq, cancelSnapshotUpgradesListener);
-        }, unsetResetModeListener::onFailure);
-
-        // Stop all model deployments
-        ActionListener pipelineValidation = ActionListener.wrap(acknowledgedResponse -> {
+            client.execute(CancelJobModelSnapshotUpgradeAction.INSTANCE, cancelSnapshotUpgradesReq, delegate);
+        }).delegateFailureAndWrap((delegate, acknowledgedResponse) -> {
             if (trainedModelAllocationClusterServiceSetOnce.get() == null || machineLearningExtension.get().isNlpEnabled() == false) {
-                stopDeploymentsListener.onResponse(AcknowledgedResponse.TRUE);
+                delegate.onResponse(AcknowledgedResponse.TRUE);
                 return;
             }
-            trainedModelAllocationClusterServiceSetOnce.get().removeAllModelAssignments(stopDeploymentsListener);
-        }, unsetResetModeListener::onFailure);
+            trainedModelAllocationClusterServiceSetOnce.get().removeAllModelAssignments(delegate);
+        });
 
         // validate no pipelines are using machine learning models
         ActionListener afterResetModeSet = ActionListener.wrap(acknowledgedResponse -> {
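
The pattern applied throughout this patch replaces `ActionListener.wrap(onSuccess, listener::onFailure)` with `listener.delegateFailureAndWrap((delegate, response) -> ...)`: only the success path is spelled out, and failures fall through to the wrapped listener unchanged. The sketch below is a minimal, self-contained illustration of that behaviour; the `Listener` interface and demo class are invented stand-ins, not the real `org.elasticsearch.action.ActionListener`, whose generics and checked-exception handling are omitted here.

    import java.util.function.BiConsumer;
    import java.util.function.Consumer;

    // Toy stand-in for the listener API used in this patch; shapes are simplified and only
    // illustrate the delegation behaviour, not the real Elasticsearch classes.
    interface Listener<T> {
        void onResponse(T result);

        void onFailure(Exception e);

        // Analogue of ActionListener.wrap(onSuccess, onFailure).
        static <T> Listener<T> wrap(Consumer<T> onSuccess, Consumer<Exception> onFailure) {
            return new Listener<>() {
                public void onResponse(T result) { onSuccess.accept(result); }
                public void onFailure(Exception e) { onFailure.accept(e); }
            };
        }

        // Analogue of listener.delegateFailureAndWrap((delegate, response) -> ...): success runs
        // the step with this listener as the delegate, failure is forwarded to it unchanged.
        default <R> Listener<R> delegateFailureAndWrap(BiConsumer<Listener<T>, R> step) {
            Listener<T> outer = this;
            return new Listener<>() {
                public void onResponse(R result) { step.accept(outer, result); }
                public void onFailure(Exception e) { outer.onFailure(e); }
            };
        }
    }

    public class DelegateFailureAndWrapDemo {
        public static void main(String[] args) {
            Listener<String> listener = Listener.wrap(
                r -> System.out.println("final result: " + r),
                e -> System.out.println("failed: " + e.getMessage())
            );

            // Old style: the failure arm is written out even though it only forwards.
            Listener<Integer> wrapped = Listener.wrap(n -> listener.onResponse("count=" + n), listener::onFailure);

            // New style: only the success arm is written; failures delegate automatically.
            Listener<Integer> delegated = listener.delegateFailureAndWrap((l, n) -> l.onResponse("count=" + n));

            wrapped.onResponse(3);                      // final result: count=3
            delegated.onResponse(3);                    // final result: count=3
            delegated.onFailure(new Exception("boom")); // failed: boom
        }
    }

On success the wrapped and the delegated listener behave identically; the delegated one simply removes the boilerplate failure forwarding at each call site.
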
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java
index 60993e12a2088..e4c73106852ef 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java
@@ -65,50 +65,54 @@ public TransportDeleteCalendarEventAction(
     protected void doExecute(Task task, DeleteCalendarEventAction.Request request, ActionListener listener) {
         final String eventId = request.getEventId();
 
-        ActionListener calendarListener = ActionListener.wrap(calendar -> {
-            GetRequest getRequest = new GetRequest(MlMetaIndex.indexName(), eventId);
-            executeAsyncWithOrigin(client, ML_ORIGIN, TransportGetAction.TYPE, getRequest, ActionListener.wrap(getResponse -> {
-                if (getResponse.isExists() == false) {
-                    listener.onFailure(new ResourceNotFoundException("No event with id [" + eventId + "]"));
-                    return;
-                }
-
-                Map source = getResponse.getSourceAsMap();
-                String calendarId = (String) source.get(Calendar.ID.getPreferredName());
-                if (calendarId == null) {
-                    listener.onFailure(
-                        ExceptionsHelper.badRequestException(
-                            "Event [" + eventId + "] does not have a valid " + Calendar.ID.getPreferredName()
-                        )
-                    );
-                    return;
-                }
-
-                if (calendarId.equals(request.getCalendarId()) == false) {
-                    listener.onFailure(
-                        ExceptionsHelper.badRequestException(
-                            "Event ["
-                                + eventId
-                                + "] has "
-                                + Calendar.ID.getPreferredName()
-                                + " ["
-                                + calendarId
-                                + "] which does not match the request "
-                                + Calendar.ID.getPreferredName()
-                                + " ["
-                                + request.getCalendarId()
-                                + "]"
-                        )
-                    );
-                    return;
-                }
-
-                deleteEvent(eventId, calendar, listener);
-            }, listener::onFailure));
-        }, listener::onFailure);
-
         // Get the calendar first so we check the calendar exists before checking the event exists
-        jobResultsProvider.calendar(request.getCalendarId(), calendarListener);
+        jobResultsProvider.calendar(request.getCalendarId(), listener.delegateFailureAndWrap((l, calendar) -> {
+            GetRequest getRequest = new GetRequest(MlMetaIndex.indexName(), eventId);
+            executeAsyncWithOrigin(
+                client,
+                ML_ORIGIN,
+                TransportGetAction.TYPE,
+                getRequest,
+                l.delegateFailureAndWrap((delegate, getResponse) -> {
+                    if (getResponse.isExists() == false) {
+                        delegate.onFailure(new ResourceNotFoundException("No event with id [" + eventId + "]"));
+                        return;
+                    }
+
+                    Map source = getResponse.getSourceAsMap();
+                    String calendarId = (String) source.get(Calendar.ID.getPreferredName());
+                    if (calendarId == null) {
+                        delegate.onFailure(
+                            ExceptionsHelper.badRequestException(
+                                "Event [" + eventId + "] does not have a valid " + Calendar.ID.getPreferredName()
+                            )
+                        );
+                        return;
+                    }
+
+                    if (calendarId.equals(request.getCalendarId()) == false) {
+                        delegate.onFailure(
+                            ExceptionsHelper.badRequestException(
+                                "Event ["
+                                    + eventId
+                                    + "] has "
+                                    + Calendar.ID.getPreferredName()
+                                    + " ["
+                                    + calendarId
+                                    + "] which does not match the request "
+                                    + Calendar.ID.getPreferredName()
+                                    + " ["
+                                    + request.getCalendarId()
+                                    + "]"
+                            )
+                        );
+                        return;
+                    }
+
+                    deleteEvent(eventId, calendar, delegate);
+                })
+            );
+        }));
     }
 
     private void deleteEvent(String eventId, Calendar calendar, ActionListener listener) {
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDatafeedAction.java
index 64ad51fc0f722..49c6021a6ed8b 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDatafeedAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDatafeedAction.java
@@ -84,19 +84,17 @@ private void forceDeleteDatafeed(
         ClusterState state,
         ActionListener listener
     ) {
-        ActionListener finalListener = ActionListener.wrap(
-            // use clusterService.state() here so that the updated state without the task is available
-            response -> datafeedManager.deleteDatafeed(request, clusterService.state(), listener),
-            listener::onFailure
-        );
-
-        ActionListener isolateDatafeedHandler = ActionListener.wrap(
-            response -> removeDatafeedTask(request, state, finalListener),
-            listener::onFailure
-        );
-
         IsolateDatafeedAction.Request isolateDatafeedRequest = new IsolateDatafeedAction.Request(request.getDatafeedId());
-        executeAsyncWithOrigin(client, ML_ORIGIN, IsolateDatafeedAction.INSTANCE, isolateDatafeedRequest, isolateDatafeedHandler);
+        executeAsyncWithOrigin(
+            client,
+            ML_ORIGIN,
+            IsolateDatafeedAction.INSTANCE,
+            isolateDatafeedRequest,
+            listener.delegateFailureAndWrap(
+                // use clusterService.state() here so that the updated state without the task is available
+                (l, response) -> datafeedManager.deleteDatafeed(request, clusterService.state(), l)
+            ).delegateFailureAndWrap((l, response) -> removeDatafeedTask(request, state, l))
+        );
     }
 
     private void removeDatafeedTask(DeleteDatafeedAction.Request request, ClusterState state, ActionListener listener) {
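
One detail worth calling out for chains like the one above (and the longer feature-reset chain earlier in this patch): consecutive `delegateFailureAndWrap` calls compose inside-out, so the step chained last is the one the asynchronous response hits first, and the step chained first runs last before completing the original listener. The following self-contained sketch (again a simplified, invented `Step` interface rather than the real `ActionListener`) makes the ordering explicit.

    import java.util.function.BiConsumer;

    // Compact toy listener, used only to show the ordering of chained delegateFailureAndWrap
    // calls; the Step name and shape are invented for illustration.
    interface Step<T> {
        void onResponse(T result);

        void onFailure(Exception e);

        default <R> Step<R> delegateFailureAndWrap(BiConsumer<Step<T>, R> step) {
            Step<T> outer = this;
            return new Step<>() {
                public void onResponse(R result) { step.accept(outer, result); }
                public void onFailure(Exception e) { outer.onFailure(e); }
            };
        }
    }

    public class ChainingOrderDemo {
        public static void main(String[] args) {
            Step<String> done = new Step<>() {
                public void onResponse(String r) { System.out.println("done: " + r); }
                public void onFailure(Exception e) { System.out.println("failed: " + e.getMessage()); }
            };

            // Chained like the datafeed-deletion hunk above: the lambda written first is the
            // innermost wrapper (runs last), the lambda chained last is hit first by the response.
            Step<String> entryPoint = done
                .delegateFailureAndWrap((Step<String> l, String r) -> {
                    System.out.println("second step sees: " + r);
                    l.onResponse(r + "+second");
                })
                .delegateFailureAndWrap((Step<String> l, String r) -> {
                    System.out.println("first step sees: " + r);
                    l.onResponse(r + "+first");
                });

            entryPoint.onResponse("isolated");
            // Prints:
            //   first step sees: isolated
            //   second step sees: isolated+first
            //   done: isolated+first+second
        }
    }
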
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java
index b28d37022e171..ad85f22873cce 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java
@@ -146,16 +146,15 @@ protected void doExecute(
                 false,
                 true,
                 null,
-                ActionListener.wrap(
-                    jobBuilders -> threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME)
-                        .execute(ActionRunnable.wrap(listener, l -> {
+                listener.delegateFailureAndWrap(
+                    (delegate, jobBuilders) -> threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME)
+                        .execute(ActionRunnable.wrap(delegate, l -> {
                             List jobs = jobBuilders.stream().map(Job.Builder::build).collect(Collectors.toList());
                             String[] jobIds = jobs.stream().map(Job::getId).toArray(String[]::new);
                             request.setExpandedJobIds(jobIds);
                             List dataRemovers = createDataRemovers(jobs, taskId, anomalyDetectionAuditor);
                             deleteExpiredData(request, dataRemovers, l, isTimedOutSupplier);
-                        })),
-                    listener::onFailure
+                        }))
                 )
             );
         }
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java
index 45bbd6256c205..ceae2a680feb0 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java
@@ -63,16 +63,16 @@ public TransportDeleteFilterAction(
     @Override
     protected void doExecute(Task task, DeleteFilterAction.Request request, ActionListener listener) {
         final String filterId = request.getFilterId();
-        jobConfigProvider.findJobsWithCustomRules(ActionListener.wrap(jobs -> {
+        jobConfigProvider.findJobsWithCustomRules(listener.delegateFailureAndWrap((delegate, jobs) -> {
             List currentlyUsedBy = findJobsUsingFilter(jobs, filterId);
             if (currentlyUsedBy.isEmpty() == false) {
-                listener.onFailure(
+                delegate.onFailure(
                     ExceptionsHelper.conflictStatusException(Messages.getMessage(Messages.FILTER_CANNOT_DELETE, filterId, currentlyUsedBy))
                 );
             } else {
-                deleteFilter(filterId, listener);
+                deleteFilter(filterId, delegate);
             }
-        }, listener::onFailure));
+        }));
     }
 
     private static List findJobsUsingFilter(List jobs, String filterId) {
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java
index f3b0fcd669637..f694e85144b48 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java
@@ -167,28 +167,23 @@ protected void masterOperation(
             }
         );
 
-        ActionListener markAsDeletingListener = finalListener.delegateFailureAndWrap((delegate, response) -> {
-            if (request.isForce()) {
-                forceDeleteJob(parentTaskClient, request, state, delegate);
-            } else {
-                normalDeleteJob(parentTaskClient, request, state, delegate);
+        ActionListener datafeedDeleteListener = finalListener.delegateFailureAndWrap(
+            (delegate, response) -> {
+                if (request.isForce()) {
+                    forceDeleteJob(parentTaskClient, request, state, delegate);
+                } else {
+                    normalDeleteJob(parentTaskClient, request, state, delegate);
+                }
             }
-        });
-
-        ActionListener datafeedDeleteListener = ActionListener.wrap(response -> {
+        ).delegateFailureAndWrap((delegate, response) -> {
             auditor.info(request.getJobId(), Messages.getMessage(Messages.JOB_AUDIT_DELETING, taskId));
             cancelResetTaskIfExists(
                 request.getJobId(),
-                ActionListener.wrap(
-                    r -> jobConfigProvider.updateJobBlockReason(
-                        request.getJobId(),
-                        new Blocked(Blocked.Reason.DELETE, taskId),
-                        markAsDeletingListener
-                    ),
-                    finalListener::onFailure
+                delegate.delegateFailureAndWrap(
+                    (l, r) -> jobConfigProvider.updateJobBlockReason(request.getJobId(), new Blocked(Blocked.Reason.DELETE, taskId), l)
                 )
             );
-        }, finalListener::onFailure);
+        });
 
         ActionListener jobExistsListener = ActionListener.wrap(
             response -> deleteDatafeedIfNecessary(request, datafeedDeleteListener),
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAction.java
index d19871d0e1b2f..7f2d0e47975e3 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAction.java
@@ -110,14 +110,8 @@ protected void masterOperation(
     ) {
         logger.debug(() -> format("[%s] Request to delete trained model%s", request.getId(), request.isForce() ? " (force)" : ""));
 
-        ActionListener performDeletion = ActionListener.wrap(
-            ignored -> deleteModel(request, state, listener),
-            listener::onFailure
-        );
-
         String id = request.getId();
-
-        cancelDownloadTask(client, id, performDeletion, request.timeout());
+        cancelDownloadTask(client, id, listener.delegateFailureAndWrap((l, ignored) -> deleteModel(request, state, l)), request.timeout());
     }
 
     // package-private for testing
@@ -218,10 +212,7 @@ private void deleteModel(DeleteTrainedModelAction.Request request, ClusterState
             if (request.isForce()) {
                 forceStopDeployment(
                     request.getId(),
-                    ActionListener.wrap(
-                        stopDeploymentResponse -> deleteAliasesAndModel(request, modelAliases, listener),
-                        listener::onFailure
-                    )
+                    listener.delegateFailureAndWrap((l, stopDeploymentResponse) -> deleteAliasesAndModel(request, modelAliases, l))
                 );
             } else {
                 listener.onFailure(
@@ -250,13 +241,11 @@ private void deleteAliasesAndModel(
     ) {
         logger.debug(() -> "[" + request.getId() + "] Deleting model");
 
-        ActionListener nameDeletionListener = ActionListener.wrap(
-            ack -> trainedModelProvider.deleteTrainedModel(request.getId(), ActionListener.wrap(r -> {
+        ActionListener nameDeletionListener = listener.delegateFailureAndWrap(
+            (delegate, ack) -> trainedModelProvider.deleteTrainedModel(request.getId(), delegate.delegateFailureAndWrap((l, r) -> {
                 auditor.info(request.getId(), "trained model deleted");
-                listener.onResponse(AcknowledgedResponse.TRUE);
-            }, listener::onFailure)),
-
-            listener::onFailure
+                l.onResponse(AcknowledgedResponse.TRUE);
+            }))
         );
 
         // No reason to update cluster state, simply delete the model
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportEvaluateDataFrameAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportEvaluateDataFrameAction.java
index 3865858f527b4..61db7f683f0f3 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportEvaluateDataFrameAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportEvaluateDataFrameAction.java
@@ -82,13 +82,11 @@ protected void doExecute(
         ActionListener listener
     ) {
         TaskId parentTaskId = new TaskId(clusterService.localNode().getId(), task.getId());
-        ActionListener> resultsListener = ActionListener.wrap(unused -> {
-            EvaluateDataFrameAction.Response response = new EvaluateDataFrameAction.Response(
-                request.getEvaluation().getName(),
-                request.getEvaluation().getResults()
-            );
-            listener.onResponse(response);
-        }, listener::onFailure);
+        ActionListener> resultsListener = listener.delegateFailureAndWrap(
+            (delegate, unused) -> delegate.onResponse(
+                new EvaluateDataFrameAction.Response(request.getEvaluation().getName(), request.getEvaluation().getResults())
+            )
+        );
 
         // Create an immutable collection of parameters to be used by evaluation metrics.
         EvaluationParameters parameters = new EvaluationParameters(maxBuckets.get());
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportExplainDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportExplainDataFrameAnalyticsAction.java
index d19b67b52afe1..b1f5eda679006 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportExplainDataFrameAnalyticsAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportExplainDataFrameAnalyticsAction.java
@@ -147,9 +147,8 @@ private void explain(
                 ).build();
                 extractedFieldsDetectorFactory.createFromSource(
                     config,
-                    ActionListener.wrap(
-                        extractedFieldsDetector -> explain(parentTaskId, config, extractedFieldsDetector, listener),
-                        listener::onFailure
+                    listener.delegateFailureAndWrap(
+                        (l, extractedFieldsDetector) -> explain(parentTaskId, config, extractedFieldsDetector, l)
                     )
                 );
             });
@@ -160,14 +159,8 @@ private void explain(
             );
             extractedFieldsDetectorFactory.createFromSource(
                 request.getConfig(),
-                ActionListener.wrap(
-                    extractedFieldsDetector -> explain(
-                        parentTaskId,
-                        request.getConfig(),
-                        extractedFieldsDetector,
-                        responseHeaderPreservingListener
-                    ),
-                    responseHeaderPreservingListener::onFailure
+                responseHeaderPreservingListener.delegateFailureAndWrap(
+                    (l, extractedFieldsDetector) -> explain(parentTaskId, request.getConfig(), extractedFieldsDetector, l)
                 )
             );
         }
@@ -189,13 +182,14 @@ private void explain(
             );
             return;
         }
-
-        ActionListener memoryEstimationListener = ActionListener.wrap(
-            memoryEstimation -> listener.onResponse(new ExplainDataFrameAnalyticsAction.Response(fieldExtraction.v2(), memoryEstimation)),
-            listener::onFailure
+        estimateMemoryUsage(
+            parentTaskId,
+            config,
+            fieldExtraction.v1(),
+            listener.delegateFailureAndWrap(
+                (l, memoryEstimation) -> l.onResponse(new ExplainDataFrameAnalyticsAction.Response(fieldExtraction.v2(), memoryEstimation))
+            )
         );
-
-        estimateMemoryUsage(parentTaskId, config, fieldExtraction.v1(), memoryEstimationListener);
     }
 
     /**
@@ -220,11 +214,8 @@ private void estimateMemoryUsage(
             estimateMemoryTaskId,
             config,
             extractorFactory,
-            ActionListener.wrap(
-                result -> listener.onResponse(
-                    new MemoryEstimation(result.getExpectedMemoryWithoutDisk(), result.getExpectedMemoryWithDisk())
-                ),
-                listener::onFailure
+            listener.delegateFailureAndWrap(
+                (l, result) -> l.onResponse(new MemoryEstimation(result.getExpectedMemoryWithoutDisk(), result.getExpectedMemoryWithDisk()))
             )
         );
     }
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFinalizeJobExecutionAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFinalizeJobExecutionAction.java
index 6d183501d2043..5aed29fd6d152 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFinalizeJobExecutionAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFinalizeJobExecutionAction.java
@@ -96,15 +96,15 @@ protected void masterOperation(
                     ML_ORIGIN,
                     TransportUpdateAction.TYPE,
                     updateRequest,
-                    ActionListener.wrap(updateResponse -> chainedListener.onResponse(null), chainedListener::onFailure)
+                    chainedListener.delegateFailureAndWrap((l, updateResponse) -> l.onResponse(null))
                 );
             });
         }
 
-        voidChainTaskExecutor.execute(ActionListener.wrap(aVoids -> {
+        voidChainTaskExecutor.execute(listener.delegateFailureAndWrap((l, aVoids) -> {
             logger.debug("finalized job [{}]", jobIdString);
-            listener.onResponse(AcknowledgedResponse.TRUE);
-        }, listener::onFailure));
+            l.onResponse(AcknowledgedResponse.TRUE);
+        }));
     }
 
     @Override
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFlushJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFlushJobAction.java
index a5fe3ad67ca06..17f1459984736 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFlushJobAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFlushJobAction.java
@@ -66,10 +66,17 @@ protected void taskOperation(
             timeRangeBuilder.endTime(request.getEnd());
         }
         paramsBuilder.forTimeRange(timeRangeBuilder.build());
-        processManager.flushJob(task, paramsBuilder.build(), ActionListener.wrap(flushAcknowledgement -> {
-            listener.onResponse(
-                new FlushJobAction.Response(true, flushAcknowledgement == null ? null : flushAcknowledgement.getLastFinalizedBucketEnd())
-            );
-        }, listener::onFailure));
+        processManager.flushJob(
+            task,
+            paramsBuilder.build(),
+            listener.delegateFailureAndWrap(
+                (l, flushAcknowledgement) -> l.onResponse(
+                    new FlushJobAction.Response(
+                        true,
+                        flushAcknowledgement == null ? null : flushAcknowledgement.getLastFinalizedBucketEnd()
+                    )
+                )
+            )
+        );
     }
 }
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetBucketsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetBucketsAction.java
index e42c2b5d87f9e..58de04146aa52 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetBucketsAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetBucketsAction.java
@@ -41,7 +41,7 @@ public TransportGetBucketsAction(
 
     @Override
     protected void doExecute(Task task, GetBucketsAction.Request request, ActionListener listener) {
-        jobManager.jobExists(request.getJobId(), null, ActionListener.wrap(ok -> {
+        jobManager.jobExists(request.getJobId(), null, listener.delegateFailureAndWrap((delegate, ok) -> {
             BucketsQueryBuilder query = new BucketsQueryBuilder().expand(request.isExpand())
                 .includeInterim(request.isExcludeInterim() == false)
                 .start(request.getStart())
@@ -62,14 +62,10 @@ protected void doExecute(Task task, GetBucketsAction.Request request, ActionList
             jobResultsProvider.buckets(
                 request.getJobId(),
                 query,
-                q -> listener.onResponse(new GetBucketsAction.Response(q)),
-                listener::onFailure,
+                q -> delegate.onResponse(new GetBucketsAction.Response(q)),
+                delegate::onFailure,
                 client
             );
-
-        },
-            listener::onFailure
-
-        ));
+        }));
     }
 }
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java
index 3e35429d352c2..89527d2cd12d8 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java
@@ -58,16 +58,15 @@ protected void doExecute(
         ActionListener listener
     ) {
         final String[] calendarId = Strings.splitStringByCommaToArray(request.getCalendarId());
-        ActionListener calendarExistsListener = ActionListener.wrap(r -> {
+        checkCalendarExists(calendarId, listener.delegateFailureAndWrap((outerDelegate, r) -> {
             ScheduledEventsQueryBuilder query = new ScheduledEventsQueryBuilder().start(request.getStart())
                 .end(request.getEnd())
                 .from(request.getPageParams().getFrom())
                 .size(request.getPageParams().getSize())
                 .calendarIds(calendarId);
 
-            ActionListener> eventsListener = ActionListener.wrap(
-                events -> listener.onResponse(new GetCalendarEventsAction.Response(events)),
-                listener::onFailure
+            ActionListener> eventsListener = outerDelegate.delegateFailureAndWrap(
+                (l, events) -> l.onResponse(new GetCalendarEventsAction.Response(events))
             );
 
             if (request.getJobId() != null) {
@@ -78,25 +77,18 @@ protected void doExecute(
 
                 }, jobNotFound -> {
                     // is the request Id a group?
-                    jobConfigProvider.groupExists(request.getJobId(), ActionListener.wrap(groupExists -> {
+                    jobConfigProvider.groupExists(request.getJobId(), eventsListener.delegateFailureAndWrap((delegate, groupExists) -> {
                         if (groupExists) {
-                            jobResultsProvider.scheduledEventsForJob(
-                                null,
-                                Collections.singletonList(request.getJobId()),
-                                query,
-                                eventsListener
-                            );
+                            jobResultsProvider.scheduledEventsForJob(null, Collections.singletonList(request.getJobId()), query, delegate);
                         } else {
-                            listener.onFailure(ExceptionsHelper.missingJobException(request.getJobId()));
+                            delegate.onFailure(ExceptionsHelper.missingJobException(request.getJobId()));
                         }
-                    }, listener::onFailure));
+                    }));
                 }));
             } else {
                 jobResultsProvider.scheduledEvents(query, eventsListener);
             }
-        }, listener::onFailure);
-
-        checkCalendarExists(calendarId, calendarExistsListener);
+        }));
     }
 
     private void checkCalendarExists(String[] calendarId, ActionListener listener) {
@@ -107,7 +99,7 @@ private void checkCalendarExists(String[] calendarId, ActionListener li
 
         jobResultsProvider.calendars(
             CalendarQueryBuilder.builder().calendarIdTokens(calendarId),
-            ActionListener.wrap(c -> listener.onResponse(true), listener::onFailure)
+            listener.delegateFailureAndWrap((l, c) -> l.onResponse(true))
         );
     }
 }
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDataFrameAnalyticsAction.java
index bec0b86e77edb..eecc5999f842b 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDataFrameAnalyticsAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDataFrameAnalyticsAction.java
@@ -83,7 +83,7 @@ protected void doExecute(
         searchResources(
             request,
             new TaskId(clusterService.localNode().getId(), task.getId()),
-            ActionListener.wrap(queryPage -> listener.onResponse(new GetDataFrameAnalyticsAction.Response(queryPage)), listener::onFailure)
+            listener.delegateFailureAndWrap((l, queryPage) -> l.onResponse(new GetDataFrameAnalyticsAction.Response(queryPage)))
         );
     }
 
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java
index 4d307546fda95..0ca5c706e5b8c 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java
@@ -54,7 +54,7 @@ protected void doExecute(Task task, GetFiltersAction.Request request, ActionList
         searchResources(
             request,
             new TaskId(clusterService.localNode().getId(), task.getId()),
-            ActionListener.wrap(filters -> listener.onResponse(new GetFiltersAction.Response(filters)), listener::onFailure)
+            listener.delegateFailureAndWrap((l, filters) -> l.onResponse(new GetFiltersAction.Response(filters)))
         );
     }
 
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java
index 666e6bf478429..4ae6512fcaff4 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java
@@ -108,14 +108,13 @@ protected void doExecute(Task task, GetJobsStatsAction.Request request, ActionLi
             tasks,
             true,
             parentTaskId,
-            ActionListener.wrap(expandedIds -> {
+            finalListener.delegateFailureAndWrap((delegate, expandedIds) -> {
                 request.setExpandedJobsIds(new ArrayList<>(expandedIds));
-                ActionListener jobStatsListener = ActionListener.wrap(
-                    response -> gatherStatsForClosedJobs(request, response, parentTaskId, finalListener),
-                    finalListener::onFailure
+                ActionListener jobStatsListener = delegate.delegateFailureAndWrap(
+                    (l, response) -> gatherStatsForClosedJobs(request, response, parentTaskId, l)
                 );
                 super.doExecute(task, request, jobStatsListener);
-            }, finalListener::onFailure)
+            })
         );
     }
 
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetMlAutoscalingStats.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetMlAutoscalingStats.java
index 78ef2f815c0fb..ab5949412927c 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetMlAutoscalingStats.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetMlAutoscalingStats.java
@@ -78,7 +78,7 @@ protected void masterOperation(Task task, Request request, ClusterState state, A
             clusterService.getClusterSettings(),
             mlMemoryTracker,
             settings,
-            ActionListener.wrap(autoscalingResources -> listener.onResponse(new Response(autoscalingResources)), listener::onFailure)
+            listener.delegateFailureAndWrap((l, autoscalingResources) -> l.onResponse(new Response(autoscalingResources)))
         );
     }
 
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetModelSnapshotsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetModelSnapshotsAction.java
index a5cc23544fbc9..67838fcfa26df 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetModelSnapshotsAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetModelSnapshotsAction.java
@@ -82,7 +82,7 @@ protected void doExecute(
         jobManager.jobExists(
             request.getJobId(),
             parentTaskId,
-            ActionListener.wrap(ok -> getModelSnapshots(request, parentTaskId, listener), listener::onFailure)
+            listener.delegateFailureAndWrap((l, ok) -> getModelSnapshots(request, parentTaskId, l))
         );
     }
 
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java
index d9dfd0fb23eeb..38c7f85b189f2 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java
@@ -120,22 +120,6 @@ private void getOverallBuckets(
     ) {
         JobsContext jobsContext = JobsContext.build(jobs, request);
 
-        ActionListener> overallBucketsListener = ActionListener.wrap(overallBuckets -> {
-            listener.onResponse(
-                new GetOverallBucketsAction.Response(new QueryPage<>(overallBuckets, overallBuckets.size(), OverallBucket.RESULTS_FIELD))
-            );
-        }, listener::onFailure);
-
-        ActionListener chunkedBucketSearcherListener = ActionListener.wrap(searcher -> {
-            if (searcher == null) {
-                listener.onResponse(
-                    new GetOverallBucketsAction.Response(new QueryPage<>(Collections.emptyList(), 0, OverallBucket.RESULTS_FIELD))
-                );
-                return;
-            }
-            searcher.searchAndComputeOverallBuckets(overallBucketsListener);
-        }, listener::onFailure);
-
         OverallBucketsProvider overallBucketsProvider = new OverallBucketsProvider(
             jobsContext.maxBucketSpan,
             request.getTopN(),
@@ -144,7 +128,29 @@ private void getOverallBuckets(
         OverallBucketsProcessor overallBucketsProcessor = requiresAggregation(request, jobsContext.maxBucketSpan)
             ? new OverallBucketsAggregator(request.getBucketSpan())
             : new OverallBucketsCollector();
-        initChunkedBucketSearcher(request, jobsContext, overallBucketsProvider, overallBucketsProcessor, chunkedBucketSearcherListener);
+        initChunkedBucketSearcher(
+            request,
+            jobsContext,
+            overallBucketsProvider,
+            overallBucketsProcessor,
+            listener.delegateFailureAndWrap((l, searcher) -> {
+                if (searcher == null) {
+                    l.onResponse(
+                        new GetOverallBucketsAction.Response(new QueryPage<>(Collections.emptyList(), 0, OverallBucket.RESULTS_FIELD))
+                    );
+                    return;
+                }
+                searcher.searchAndComputeOverallBuckets(
+                    l.delegateFailureAndWrap(
+                        (ll, overallBuckets) -> ll.onResponse(
+                            new GetOverallBucketsAction.Response(
+                                new QueryPage<>(overallBuckets, overallBuckets.size(), OverallBucket.RESULTS_FIELD)
+                            )
+                        )
+                    )
+                );
+            })
+        );
     }
 
     private static boolean requiresAggregation(GetOverallBucketsAction.Request request, TimeValue maxBucketSpan) {
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDataFrameAnalyticsAction.java
index 9bf18671e7c11..8fe7c3686dcb9 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDataFrameAnalyticsAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDataFrameAnalyticsAction.java
@@ -112,18 +112,18 @@ void preview(Task task, DataFrameAnalyticsConfig config, ActionListener {
+        extractedFieldsDetectorFactory.createFromSource(config, listener.delegateFailureAndWrap((delegate, extractedFieldsDetector) -> {
             DataFrameDataExtractor extractor = DataFrameDataExtractorFactory.createForSourceIndices(
                 client,
                 parentTaskId.toString(),
                 config,
                 extractedFieldsDetector.detect().v1()
             ).newExtractor(false);
-            extractor.preview(ActionListener.wrap(rows -> {
+            extractor.preview(delegate.delegateFailureAndWrap((l, rows) -> {
                 List fieldNames = extractor.getFieldNames();
-                listener.onResponse(new Response(rows.stream().map((r) -> mergeRow(r, fieldNames)).collect(Collectors.toList())));
-            }, listener::onFailure));
-        }, listener::onFailure));
+                l.onResponse(new Response(rows.stream().map((r) -> mergeRow(r, fieldNames)).collect(Collectors.toList())));
+            }));
+        }));
     }
 
 }
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java
index 5ceb34bfc0510..d567a823c62cd 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java
@@ -7,7 +7,6 @@
 package org.elasticsearch.xpack.ml.action;
 
 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.fieldcaps.FieldCapabilities;
 import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
 import org.elasticsearch.action.fieldcaps.TransportFieldCapabilitiesAction;
 import org.elasticsearch.action.support.ActionFilters;
@@ -49,7 +48,6 @@
 import java.io.InputStreamReader;
 import java.nio.charset.StandardCharsets;
 import java.util.Date;
-import java.util.Map;
 import java.util.Optional;
 import java.util.stream.Collectors;
 
@@ -101,27 +99,26 @@ public TransportPreviewDatafeedAction(
     @Override
     protected void doExecute(Task task, PreviewDatafeedAction.Request request, ActionListener listener) {
         TaskId parentTaskId = new TaskId(clusterService.localNode().getId(), task.getId());
-        ActionListener datafeedConfigActionListener = ActionListener.wrap(datafeedConfig -> {
+        ActionListener datafeedConfigActionListener = listener.delegateFailureAndWrap((delegate, datafeedConfig) -> {
             if (request.getJobConfig() != null) {
-                previewDatafeed(parentTaskId, datafeedConfig, request.getJobConfig().build(new Date()), request, listener);
+                previewDatafeed(parentTaskId, datafeedConfig, request.getJobConfig().build(new Date()), request, delegate);
                 return;
             }
             jobConfigProvider.getJob(
                 datafeedConfig.getJobId(),
                 parentTaskId,
-                ActionListener.wrap(
-                    jobBuilder -> previewDatafeed(parentTaskId, datafeedConfig, jobBuilder.build(), request, listener),
-                    listener::onFailure
+                delegate.delegateFailureAndWrap(
+                    (l, jobBuilder) -> previewDatafeed(parentTaskId, datafeedConfig, jobBuilder.build(), request, l)
                 )
             );
-        }, listener::onFailure);
+        });
         if (request.getDatafeedConfig() != null) {
             datafeedConfigActionListener.onResponse(request.getDatafeedConfig());
         } else {
             datafeedConfigProvider.getDatafeedConfig(
                 request.getDatafeedId(),
                 parentTaskId,
-                ActionListener.wrap(builder -> datafeedConfigActionListener.onResponse(builder.build()), listener::onFailure)
+                datafeedConfigActionListener.delegateFailureAndWrap((l, builder) -> l.onResponse(builder.build()))
             );
         }
     }
@@ -209,10 +206,11 @@ private void isDateNanos(DatafeedConfig datafeed, String timeField, ActionListen
             client,
             TransportFieldCapabilitiesAction.TYPE,
             fieldCapabilitiesRequest,
-            ActionListener.wrap(fieldCapsResponse -> {
-                Map timeFieldCaps = fieldCapsResponse.getField(timeField);
-                listener.onResponse(timeFieldCaps.containsKey(DateFieldMapper.DATE_NANOS_CONTENT_TYPE));
-            }, listener::onFailure)
+            listener.delegateFailureAndWrap(
+                (l, fieldCapsResponse) -> l.onResponse(
+                    fieldCapsResponse.getField(timeField).containsKey(DateFieldMapper.DATE_NANOS_CONTENT_TYPE)
+                )
+            )
         );
     }
 
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDataFrameAnalyticsAction.java
index d73b942e766cf..77bcc9dbcf7d8 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDataFrameAnalyticsAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDataFrameAnalyticsAction.java
@@ -136,18 +136,13 @@ protected void masterOperation(
 
         final DataFrameAnalyticsConfig config = request.getConfig();
 
-        ActionListener sourceDestValidationListener = ActionListener.wrap(
-            aBoolean -> putValidatedConfig(config, request.masterNodeTimeout(), listener),
-            listener::onFailure
-        );
-
         sourceDestValidator.validate(
             clusterService.state(),
             config.getSource().getIndex(),
             config.getDest().getIndex(),
             null,
             SourceDestValidations.ALL_VALIDATIONS,
-            sourceDestValidationListener
+            listener.delegateFailureAndWrap((l, aBoolean) -> putValidatedConfig(config, request.masterNodeTimeout(), l))
         );
     }
 
@@ -191,22 +186,20 @@ private void putValidatedConfig(
                 }
                 privRequest.indexPrivileges(indicesPrivileges);
 
-                ActionListener privResponseListener = ActionListener.wrap(
-                    r -> handlePrivsResponse(username, preparedForPutConfig, r, masterNodeTimeout, listener),
-                    listener::onFailure
+                client.execute(
+                    HasPrivilegesAction.INSTANCE,
+                    privRequest,
+                    listener.delegateFailureAndWrap(
+                        (l, r) -> handlePrivsResponse(username, preparedForPutConfig, r, masterNodeTimeout, listener)
+                    )
                 );
-
-                client.execute(HasPrivilegesAction.INSTANCE, privRequest, privResponseListener);
             });
         } else {
             updateDocMappingAndPutConfig(
                 preparedForPutConfig,
                 threadPool.getThreadContext().getHeaders(),
                 masterNodeTimeout,
-                ActionListener.wrap(
-                    finalConfig -> listener.onResponse(new PutDataFrameAnalyticsAction.Response(finalConfig)),
-                    listener::onFailure
-                )
+                listener.delegateFailureAndWrap((l, finalConfig) -> l.onResponse(new PutDataFrameAnalyticsAction.Response(finalConfig)))
             );
         }
     }
@@ -223,10 +216,7 @@ private void handlePrivsResponse(
                 memoryCappedConfig,
                 threadPool.getThreadContext().getHeaders(),
                 masterNodeTimeout,
-                ActionListener.wrap(
-                    finalConfig -> listener.onResponse(new PutDataFrameAnalyticsAction.Response(finalConfig)),
-                    listener::onFailure
-                )
+                listener.delegateFailureAndWrap((l, finalConfig) -> l.onResponse(new PutDataFrameAnalyticsAction.Response(finalConfig)))
             );
         } else {
             XContentBuilder builder = JsonXContent.contentBuilder();
@@ -254,13 +244,13 @@ private void updateDocMappingAndPutConfig(
         TimeValue masterNodeTimeout,
         ActionListener listener
     ) {
-        ActionListener auditingListener = ActionListener.wrap(finalConfig -> {
+        ActionListener auditingListener = listener.delegateFailureAndWrap((delegate, finalConfig) -> {
             auditor.info(
                 finalConfig.getId(),
                 Messages.getMessage(Messages.DATA_FRAME_ANALYTICS_AUDIT_CREATED, finalConfig.getAnalysis().getWriteableName())
             );
-            listener.onResponse(finalConfig);
-        }, listener::onFailure);
+            delegate.onResponse(finalConfig);
+        });
 
         ClusterState clusterState = clusterService.state();
         if (clusterState == null) {
@@ -274,7 +264,7 @@ private void updateDocMappingAndPutConfig(
             client,
             clusterState,
             masterNodeTimeout,
-            ActionListener.wrap(unused -> configProvider.put(config, headers, masterNodeTimeout, auditingListener), listener::onFailure),
+            auditingListener.delegateFailureAndWrap((l, unused) -> configProvider.put(config, headers, masterNodeTimeout, l)),
             MlConfigIndex.CONFIG_INDEX_MAPPINGS_VERSION
         );
     }
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java
index d6e52b6de1fd4..c89b5005444b5 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java
@@ -237,38 +237,24 @@ protected void masterOperation(
             return;
         }
 
-        ActionListener finalResponseAction = ActionListener.wrap(
-            (configToReturn) -> finalResponseListener.onResponse(new Response(configToReturn)),
-            finalResponseListener::onFailure
-        );
-
-        ActionListener verifyClusterAndModelArchitectures = ActionListener.wrap(
-            (configToReturn) -> verifyMlNodesAndModelArchitectures(configToReturn, client, threadPool, finalResponseAction),
-            finalResponseListener::onFailure
-        );
-
-        ActionListener finishedStoringListener = ActionListener.wrap(bool -> {
+        var isPackageModel = config.isPackagedModel();
+        ActionListener checkStorageIndexSizeListener = finalResponseListener.delegateFailureAndWrap((delegate, bool) -> {
             TrainedModelConfig configToReturn = trainedModelConfig.clearDefinition().build();
             if (modelPackageConfigHolder.get() != null) {
                 triggerModelFetchIfNecessary(
                     configToReturn.getModelId(),
                     modelPackageConfigHolder.get(),
                     request.isWaitForCompletion(),
-                    ActionListener.wrap(
-                        downloadTriggered -> verifyClusterAndModelArchitectures.onResponse(configToReturn),
-                        finalResponseListener::onFailure
-                    )
+                    delegate.delegateFailureAndWrap((l, cfg) -> l.onResponse(new Response(cfg)))
+                        .delegateFailureAndWrap(
+                            (l, cfg) -> verifyMlNodesAndModelArchitectures(cfg, client, threadPool, l)
+                        )
+                        .delegateFailureAndWrap((l, downloadTriggered) -> l.onResponse(configToReturn))
                 );
             } else {
-                finalResponseListener.onResponse(new PutTrainedModelAction.Response(configToReturn));
+                delegate.onResponse(new PutTrainedModelAction.Response(configToReturn));
             }
-        }, finalResponseListener::onFailure);
-
-        var isPackageModel = config.isPackagedModel();
-        ActionListener checkStorageIndexSizeListener = ActionListener.wrap(
-            r -> trainedModelProvider.storeTrainedModel(trainedModelConfig.build(), finishedStoringListener, isPackageModel),
-            finalResponseListener::onFailure
-        );
+        }).delegateFailureAndWrap((l, r) -> trainedModelProvider.storeTrainedModel(trainedModelConfig.build(), l, isPackageModel));
 
         ActionListener tagsModelIdCheckListener = ActionListener.wrap(r -> {
             if (TrainedModelType.PYTORCH.equals(trainedModelConfig.getModelType())) {
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java
index 6c3fb28fe2c83..4119b23747fcb 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java
@@ -148,9 +148,9 @@ public void preview(ActionListener> listener) {
             client,
             TransportSearchAction.TYPE,
             searchRequestBuilder.request(),
-            ActionListener.wrap(searchResponse -> {
+            listener.delegateFailureAndWrap((delegate, searchResponse) -> {
                 if (searchResponse.getHits().getHits().length == 0) {
-                    listener.onResponse(Collections.emptyList());
+                    delegate.onResponse(Collections.emptyList());
                     return;
                 }
 
@@ -160,8 +160,8 @@ public void preview(ActionListener> listener) {
                     String[] extractedValues = extractValues(hit);
                     rows.add(extractedValues == null ? new Row(null, hit, true) : new Row(extractedValues, hit, false));
                 }
-                listener.onResponse(rows);
-            }, listener::onFailure)
+                delegate.onResponse(rows);
+            })
         );
     }
 
@@ -393,11 +393,8 @@ public void collectDataSummaryAsync(ActionListener dataSummaryActio
             client,
             TransportSearchAction.TYPE,
             searchRequestBuilder.request(),
-            ActionListener.wrap(
-                searchResponse -> dataSummaryActionListener.onResponse(
-                    new DataSummary(searchResponse.getHits().getTotalHits().value, numberOfFields)
-                ),
-                dataSummaryActionListener::onFailure
+            dataSummaryActionListener.delegateFailureAndWrap(
+                (l, searchResponse) -> l.onResponse(new DataSummary(searchResponse.getHits().getTotalHits().value, numberOfFields))
             )
         );
     }
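
Not every `ActionListener.wrap` in these files was converted: where the failure arm does real work, for example the close-jobs and stop-datafeeds calls that retry with `force=true` after a failure, there is nothing to delegate, so the explicit wrap with a custom failure handler stays. Below is a small self-contained sketch of that surviving pattern, using an invented `RetryListener` and `closeJobs` stand-in rather than the real client and actions.

    import java.util.function.Consumer;

    // Toy listener with only the wrap(...) factory, used to show why call sites whose failure
    // arm retries (rather than merely forwarding the exception) keep ActionListener.wrap.
    interface RetryListener<T> {
        void onResponse(T result);

        void onFailure(Exception e);

        static <T> RetryListener<T> wrap(Consumer<T> onSuccess, Consumer<Exception> onFailure) {
            return new RetryListener<>() {
                public void onResponse(T result) { onSuccess.accept(result); }
                public void onFailure(Exception e) { onFailure.accept(e); }
            };
        }
    }

    public class ForceRetryDemo {
        // Stand-in for an async action call: the non-forced attempt fails, the forced one succeeds.
        static void closeJobs(boolean force, RetryListener<String> listener) {
            if (force) {
                listener.onResponse("closed (force=true)");
            } else {
                listener.onFailure(new Exception("jobs busy"));
            }
        }

        public static void main(String[] args) {
            RetryListener<String> caller = RetryListener.wrap(
                r -> System.out.println("result: " + r),
                e -> System.out.println("gave up: " + e.getMessage())
            );

            // The failure arm retries instead of delegating, so wrap(...) is still the right tool.
            closeJobs(false, RetryListener.wrap(caller::onResponse, failure -> {
                System.out.println("non-forced close failed, retrying with force=true: " + failure.getMessage());
                closeJobs(true, caller);
            }));
            // Prints:
            //   non-forced close failed, retrying with force=true: jobs busy
            //   result: closed (force=true)
        }
    }
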

From 01751c0298de44ce13f96a7ad8763c9e2c7f0873 Mon Sep 17 00:00:00 2001
From: Liam Thompson <32779855+leemthompo@users.noreply.github.com>
Date: Fri, 12 Jan 2024 12:13:34 +0100
Subject: [PATCH 66/75] [Docs] Add connectors links, cleanup connectors API
 docs (#104262)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* [Docs] Add connectors links

* 🧹 Cleanup abbreviations, add missing tech preview labels

* Unify remaining tech preview, abbreviations, update CLI verbiage

* Unify remaining tech preview, abbreviations
---
 .../apis/cancel-connector-sync-job-api.asciidoc     |  2 ++
 .../connector/apis/check-in-connector-api.asciidoc  |  5 ++---
 .../apis/check-in-connector-sync-job-api.asciidoc   |  2 ++
 .../connector/apis/connector-apis.asciidoc          | 13 ++++++-------
 .../connector/apis/create-connector-api.asciidoc    |  2 ++
 .../apis/create-connector-sync-job-api.asciidoc     |  3 +++
 .../connector/apis/delete-connector-api.asciidoc    |  5 ++---
 .../apis/delete-connector-sync-job-api.asciidoc     |  5 ++---
 .../connector/apis/get-connector-api.asciidoc       |  3 ++-
 .../apis/get-connector-sync-job-api.asciidoc        |  3 ++-
 .../apis/list-connector-sync-jobs-api.asciidoc      |  5 ++---
 .../connector/apis/list-connectors-api.asciidoc     |  5 ++---
 .../apis/set-connector-sync-job-error-api.asciidoc  |  2 ++
 .../apis/set-connector-sync-job-stats-api.asciidoc  |  2 ++
 .../update-connector-configuration-api.asciidoc     |  5 ++---
 .../apis/update-connector-error-api.asciidoc        |  5 ++---
 .../apis/update-connector-filtering-api.asciidoc    |  6 +++---
 .../apis/update-connector-last-sync-api.asciidoc    |  5 ++---
 .../update-connector-name-description-api.asciidoc  |  6 +++---
 .../apis/update-connector-pipeline-api.asciidoc     |  5 ++---
 .../apis/update-connector-scheduling-api.asciidoc   |  5 ++---
 21 files changed, 49 insertions(+), 45 deletions(-)

diff --git a/docs/reference/connector/apis/cancel-connector-sync-job-api.asciidoc b/docs/reference/connector/apis/cancel-connector-sync-job-api.asciidoc
index 6123b7eb5511d..bd886bf923af8 100644
--- a/docs/reference/connector/apis/cancel-connector-sync-job-api.asciidoc
+++ b/docs/reference/connector/apis/cancel-connector-sync-job-api.asciidoc
@@ -4,6 +4,8 @@
 Cancel connector sync job
 ++++
 
+preview::[]
+
 Cancels a connector sync job.
 
 [[cancel-connector-sync-job-api-request]]
diff --git a/docs/reference/connector/apis/check-in-connector-api.asciidoc b/docs/reference/connector/apis/check-in-connector-api.asciidoc
index c0c021f1304dc..9f88c595e3a67 100644
--- a/docs/reference/connector/apis/check-in-connector-api.asciidoc
+++ b/docs/reference/connector/apis/check-in-connector-api.asciidoc
@@ -1,12 +1,11 @@
 [[check-in-connector-api]]
 === Check in connector API
-
-preview::[]
-
 ++++
 Check in a connector
 ++++
 
+preview::[]
+
 Updates the `last_seen` field of a connector with current timestamp.
 
 [[check-in-connector-api-request]]
diff --git a/docs/reference/connector/apis/check-in-connector-sync-job-api.asciidoc b/docs/reference/connector/apis/check-in-connector-sync-job-api.asciidoc
index 04c8057e2c115..d3cc34bf025ed 100644
--- a/docs/reference/connector/apis/check-in-connector-sync-job-api.asciidoc
+++ b/docs/reference/connector/apis/check-in-connector-sync-job-api.asciidoc
@@ -4,6 +4,8 @@
 Check in connector sync job
 ++++
 
+preview::[]
+
 Checks in a connector sync job (updates `last_seen` to the current time).
 
 [[check-in-connector-sync-job-api-request]]
diff --git a/docs/reference/connector/apis/connector-apis.asciidoc b/docs/reference/connector/apis/connector-apis.asciidoc
index e127dc07446b5..eabb531551fe5 100644
--- a/docs/reference/connector/apis/connector-apis.asciidoc
+++ b/docs/reference/connector/apis/connector-apis.asciidoc
@@ -3,17 +3,16 @@
 
 preview::[]
 
-++++
-Connector APIs
-++++
-
----
-
-The connector and sync jobs API provides a convenient way to create and manage Elastic connectors and sync jobs in an internal index.
+The connector and sync jobs API provides a convenient way to create and manage Elastic {enterprise-search-ref}/connectors.html[connectors^] and sync jobs in an internal index.
 
 This API provides an alternative to relying solely on {kib} UI for connector and sync job management. The API comes with a set of
 validations and assertions to ensure that the state representation in the internal index remains valid.
 
+[TIP]
+====
+We also have a command-line interface for Elastic connectors. Learn more in the https://github.com/elastic/connectors/blob/main/docs/CLI.md[elastic/connectors] repository.
+====
+
 [discrete]
 [[elastic-connector-apis]]
 === Connector APIs
diff --git a/docs/reference/connector/apis/create-connector-api.asciidoc b/docs/reference/connector/apis/create-connector-api.asciidoc
index b62ca4ad070a4..2c1c4c9ba7bc4 100644
--- a/docs/reference/connector/apis/create-connector-api.asciidoc
+++ b/docs/reference/connector/apis/create-connector-api.asciidoc
@@ -4,6 +4,8 @@
 Create connector
 ++++
 
+preview::[]
+
 Creates a connector.
 
 
diff --git a/docs/reference/connector/apis/create-connector-sync-job-api.asciidoc b/docs/reference/connector/apis/create-connector-sync-job-api.asciidoc
index e8c2c364797c4..b036485285256 100644
--- a/docs/reference/connector/apis/create-connector-sync-job-api.asciidoc
+++ b/docs/reference/connector/apis/create-connector-sync-job-api.asciidoc
@@ -4,6 +4,9 @@
 Create connector sync job
 ++++
 
+preview::[]
+
+
 Creates a connector sync job.
 
 [source, console]
diff --git a/docs/reference/connector/apis/delete-connector-api.asciidoc b/docs/reference/connector/apis/delete-connector-api.asciidoc
index 6d3a120df785a..c7e9dcd94d2ad 100644
--- a/docs/reference/connector/apis/delete-connector-api.asciidoc
+++ b/docs/reference/connector/apis/delete-connector-api.asciidoc
@@ -1,12 +1,11 @@
 [[delete-connector-api]]
 === Delete connector API
-
-preview::[]
-
 ++++
 Delete connector
 ++++
 
+preview::[]
+
 Removes a connector and its associated data.
 This is a destructive action that is not recoverable.
 
diff --git a/docs/reference/connector/apis/delete-connector-sync-job-api.asciidoc b/docs/reference/connector/apis/delete-connector-sync-job-api.asciidoc
index 8641794576bf1..32df172df758a 100644
--- a/docs/reference/connector/apis/delete-connector-sync-job-api.asciidoc
+++ b/docs/reference/connector/apis/delete-connector-sync-job-api.asciidoc
@@ -1,12 +1,11 @@
 [[delete-connector-sync-job-api]]
 === Delete connector sync job API
-
-preview::[]
-
 ++++
 Delete connector sync job
 ++++
 
+preview::[]
+
 Removes a connector sync job and its associated data.
 This is a destructive action that is not recoverable.
 
diff --git a/docs/reference/connector/apis/get-connector-api.asciidoc b/docs/reference/connector/apis/get-connector-api.asciidoc
index ab4a2758ce4f1..693a9fd767806 100644
--- a/docs/reference/connector/apis/get-connector-api.asciidoc
+++ b/docs/reference/connector/apis/get-connector-api.asciidoc
@@ -1,10 +1,11 @@
 [[get-connector-api]]
 === Get connector API
-preview::[]
 ++++
 Get connector
 ++++
 
+preview::[]
+
 Retrieves the details about a connector.
 
 [[get-connector-api-request]]
diff --git a/docs/reference/connector/apis/get-connector-sync-job-api.asciidoc b/docs/reference/connector/apis/get-connector-sync-job-api.asciidoc
index b33aec8c55e60..bfa82ea0d345c 100644
--- a/docs/reference/connector/apis/get-connector-sync-job-api.asciidoc
+++ b/docs/reference/connector/apis/get-connector-sync-job-api.asciidoc
@@ -1,10 +1,11 @@
 [[get-connector-sync-job-api]]
 === Get connector sync job API
-preview::[]
 ++++
 Get connector sync job
 ++++
 
+preview::[]
+
 Retrieves the details about a connector sync job.
 
 [[get-connector-sync-job-api-request]]
diff --git a/docs/reference/connector/apis/list-connector-sync-jobs-api.asciidoc b/docs/reference/connector/apis/list-connector-sync-jobs-api.asciidoc
index 8b88f318f5304..a8851885b5051 100644
--- a/docs/reference/connector/apis/list-connector-sync-jobs-api.asciidoc
+++ b/docs/reference/connector/apis/list-connector-sync-jobs-api.asciidoc
@@ -1,13 +1,12 @@
 [role="xpack"]
 [[list-connector-sync-jobs-api]]
 === List connector sync jobs API
-
-preview::[]
-
 ++++
 List connector sync jobs
 ++++
 
+preview::[]
+
 Returns information about all stored connector sync jobs ordered by their creation date in ascending order.
 
 
diff --git a/docs/reference/connector/apis/list-connectors-api.asciidoc b/docs/reference/connector/apis/list-connectors-api.asciidoc
index 57d3cc47aeb7a..9b3fc50690243 100644
--- a/docs/reference/connector/apis/list-connectors-api.asciidoc
+++ b/docs/reference/connector/apis/list-connectors-api.asciidoc
@@ -1,13 +1,12 @@
 [role="xpack"]
 [[list-connector-api]]
 === List connectors API
-
-preview::[]
-
 ++++
 List connectors
 ++++
 
+preview::[]
+
 Returns information about all stored connectors.
 
 
diff --git a/docs/reference/connector/apis/set-connector-sync-job-error-api.asciidoc b/docs/reference/connector/apis/set-connector-sync-job-error-api.asciidoc
index 935fcccc77fcf..a9dbf5ceb1eb2 100644
--- a/docs/reference/connector/apis/set-connector-sync-job-error-api.asciidoc
+++ b/docs/reference/connector/apis/set-connector-sync-job-error-api.asciidoc
@@ -4,6 +4,8 @@
 Set connector sync job error
 ++++
 
+preview::[]
+
 Sets a connector sync job error.
 
 [[set-connector-sync-job-error-api-request]]
diff --git a/docs/reference/connector/apis/set-connector-sync-job-stats-api.asciidoc b/docs/reference/connector/apis/set-connector-sync-job-stats-api.asciidoc
index 0513155312bb4..a417bcf8b9e9f 100644
--- a/docs/reference/connector/apis/set-connector-sync-job-stats-api.asciidoc
+++ b/docs/reference/connector/apis/set-connector-sync-job-stats-api.asciidoc
@@ -4,6 +4,8 @@
 Set connector sync job stats
 ++++
 
+preview::[]
+
 Sets connector sync job stats.
 
 [[set-connector-sync-job-stats-api-request]]
diff --git a/docs/reference/connector/apis/update-connector-configuration-api.asciidoc b/docs/reference/connector/apis/update-connector-configuration-api.asciidoc
index 6d6591a6f00bc..57484c14d0f90 100644
--- a/docs/reference/connector/apis/update-connector-configuration-api.asciidoc
+++ b/docs/reference/connector/apis/update-connector-configuration-api.asciidoc
@@ -1,12 +1,11 @@
 [[update-connector-configuration-api]]
 === Update connector configuration API
-
-preview::[]
-
 ++++
 Update connector configuration
 ++++
 
+preview::[]
+
 Updates the `configuration` of a connector.
 
 
diff --git a/docs/reference/connector/apis/update-connector-error-api.asciidoc b/docs/reference/connector/apis/update-connector-error-api.asciidoc
index 19bc15f0dc60a..dbed25f1bf8d5 100644
--- a/docs/reference/connector/apis/update-connector-error-api.asciidoc
+++ b/docs/reference/connector/apis/update-connector-error-api.asciidoc
@@ -1,12 +1,11 @@
 [[update-connector-error-api]]
 === Update connector error API
-
-preview::[]
-
 ++++
 Update connector error
 ++++
 
+preview::[]
+
 Updates the `error` field of a connector.
 
 [[update-connector-error-api-request]]
diff --git a/docs/reference/connector/apis/update-connector-filtering-api.asciidoc b/docs/reference/connector/apis/update-connector-filtering-api.asciidoc
index d4c7bb16a3304..3e81f0fda2ce7 100644
--- a/docs/reference/connector/apis/update-connector-filtering-api.asciidoc
+++ b/docs/reference/connector/apis/update-connector-filtering-api.asciidoc
@@ -1,12 +1,12 @@
 [[update-connector-filtering-api]]
 === Update connector filtering API
-
-preview::[]
-
 ++++
 Update connector filtering
 ++++
 
+preview::[]
+
+
 Updates the `filtering` configuration of a connector. Learn more about filtering in the {enterprise-search-ref}/sync-rules.html[sync rules] documentation.
 
 [[update-connector-filtering-api-request]]
diff --git a/docs/reference/connector/apis/update-connector-last-sync-api.asciidoc b/docs/reference/connector/apis/update-connector-last-sync-api.asciidoc
index e9fffd22b21cd..6f41925e3676f 100644
--- a/docs/reference/connector/apis/update-connector-last-sync-api.asciidoc
+++ b/docs/reference/connector/apis/update-connector-last-sync-api.asciidoc
@@ -1,12 +1,11 @@
 [[update-connector-last-sync-api]]
 === Update connector last sync stats API
-
-preview::[]
-
 ++++
 Update connector last sync stats
 ++++
 
+preview::[]
+
 Updates the fields related to the last sync of a connector.
 
 This action is used for analytics and monitoring.
diff --git a/docs/reference/connector/apis/update-connector-name-description-api.asciidoc b/docs/reference/connector/apis/update-connector-name-description-api.asciidoc
index d45fb545e168b..c54dba8dd72b5 100644
--- a/docs/reference/connector/apis/update-connector-name-description-api.asciidoc
+++ b/docs/reference/connector/apis/update-connector-name-description-api.asciidoc
@@ -1,12 +1,12 @@
 [[update-connector-name-description-api]]
 === Update connector name and description API
-
-preview::[]
-
 ++++
 Update connector name and description
 ++++
 
+preview::[]
+
+
 Updates the `name` and `description` fields of a connector.
 
 [[update-connector-name-description-api-request]]
diff --git a/docs/reference/connector/apis/update-connector-pipeline-api.asciidoc b/docs/reference/connector/apis/update-connector-pipeline-api.asciidoc
index 6938506703da8..63872bf96aa55 100644
--- a/docs/reference/connector/apis/update-connector-pipeline-api.asciidoc
+++ b/docs/reference/connector/apis/update-connector-pipeline-api.asciidoc
@@ -1,12 +1,11 @@
 [[update-connector-pipeline-api]]
 === Update connector pipeline API
-
-preview::[]
-
 ++++
 Update connector pipeline
 ++++
 
+preview::[]
+
 Updates the `pipeline` configuration of a connector.
 
 When you create a new connector, the configuration of an <> is populated with default settings.
diff --git a/docs/reference/connector/apis/update-connector-scheduling-api.asciidoc b/docs/reference/connector/apis/update-connector-scheduling-api.asciidoc
index c47e6d4c0367b..7a2f33bcaeaa8 100644
--- a/docs/reference/connector/apis/update-connector-scheduling-api.asciidoc
+++ b/docs/reference/connector/apis/update-connector-scheduling-api.asciidoc
@@ -1,12 +1,11 @@
 [[update-connector-scheduling-api]]
 === Update connector scheduling API
-
-preview::[]
-
 ++++
 Update connector scheduling
 ++++
 
+preview::[]
+
 Updates the `scheduling` configuration of a connector.
 
 [[update-connector-scheduling-api-request]]

From 1dfc72311669af21a1832eb3ca3912eae3ef367d Mon Sep 17 00:00:00 2001
From: Armin Braun 
Date: Fri, 12 Jan 2024 12:21:42 +0100
Subject: [PATCH 67/75] Speed up search cancellation checks by storing them in
 list (#104304)

We only ever add lambdas to this collection, so the contains check is
pointless; we might as well use a list here, which iterates more efficiently.

relates #104273
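
For illustration, here is a minimal sketch of why the duplicate check never
fires in practice (a hypothetical registry class, not the actual
`MutableQueryTimeout`): every caller registers a freshly created lambda, and
distinct lambda instances are never equal, so the only duplicate a `Set`
could ever reject is the exact same reference added twice, which the assert
below still catches in tests.

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Objects;

    // Hypothetical sketch of the pattern, assuming callers only register fresh lambdas.
    class CancellationRegistry {
        private final List<Runnable> checks = new ArrayList<>();

        Runnable add(Runnable check) {
            Objects.requireNonNull(check, "cancellation runnable should not be null");
            // Distinct lambdas are never equal(), so a HashSet would never reject one;
            // an assert is enough to catch re-adding the very same reference in tests.
            assert checks.contains(check) == false : "Cancellation runnable already added";
            checks.add(check);
            return check;
        }

        // Hot path: iterate the list and let each check throw if the search was cancelled.
        void checkCancelled() {
            for (Runnable check : checks) {
                check.run();
            }
        }
    }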
---
 .../search/internal/ContextIndexSearcher.java             | 8 +++-----
 .../org/elasticsearch/search/SearchCancellationTests.java | 2 --
 2 files changed, 3 insertions(+), 7 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java
index d834c12d0abe1..0263c6e83b17a 100644
--- a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java
+++ b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java
@@ -50,7 +50,6 @@
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.Comparator;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Objects;
 import java.util.PriorityQueue;
@@ -525,13 +524,12 @@ public DirectoryReader getDirectoryReader() {
 
     private static class MutableQueryTimeout implements ExitableDirectoryReader.QueryCancellation {
 
-        private final Set<Runnable> runnables = new HashSet<>();
+        private final List<Runnable> runnables = new ArrayList<>();
 
         private Runnable add(Runnable action) {
             Objects.requireNonNull(action, "cancellation runnable should not be null");
-            if (runnables.add(action) == false) {
-                throw new IllegalArgumentException("Cancellation runnable already added");
-            }
+            assert runnables.contains(action) == false : "Cancellation runnable already added";
+            runnables.add(action);
             return action;
         }
 
diff --git a/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java b/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java
index 19f6400badcf5..79f16ab390dd2 100644
--- a/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java
+++ b/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java
@@ -92,8 +92,6 @@ public void testAddingCancellationActions() throws IOException {
 
         Runnable r = () -> {};
         searcher.addQueryCancellation(r);
-        IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> searcher.addQueryCancellation(r));
-        assertEquals("Cancellation runnable already added", iae.getMessage());
     }
 
     public void testCancellableCollector() throws IOException {

From ed8c98095a83d5ce8eec73119affa013a9ad3fe8 Mon Sep 17 00:00:00 2001
From: David Turner 
Date: Fri, 12 Jan 2024 11:22:27 +0000
Subject: [PATCH 68/75] Reduce contention in
 `CancellableTask#ensureNotCancelled` (#104305)

There is no need to acquire the mutex to read the volatile `isCancelled`
field; we can check it first and only lock on the rare cancelled path,
avoiding contention.

Relates #104273
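
A minimal sketch of the idea, using a hypothetical class rather than
`CancellableTask` itself: readers check the volatile flag without locking and
only take the monitor on the rare cancelled path, so concurrent
`ensureNotCancelled` calls no longer contend on the mutex.

    // Hypothetical sketch: a volatile-flag check, locking only to build the exception.
    class CancellableWork {
        private volatile boolean cancelled;
        private String reason; // guarded by 'this'

        synchronized void cancel(String reason) {
            this.reason = reason;
            this.cancelled = true;
        }

        void ensureNotCancelled() {
            // Fast path: a plain volatile read, no monitor acquisition, no contention.
            if (cancelled) {
                // Slow path: lock only to read 'reason' consistently while building the exception.
                synchronized (this) {
                    throw new RuntimeException("task cancelled [" + reason + "]");
                }
            }
        }
    }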
---
 .../main/java/org/elasticsearch/tasks/CancellableTask.java  | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/tasks/CancellableTask.java b/server/src/main/java/org/elasticsearch/tasks/CancellableTask.java
index 16eb7b7b2fb0f..8a0aa2033a30e 100644
--- a/server/src/main/java/org/elasticsearch/tasks/CancellableTask.java
+++ b/server/src/main/java/org/elasticsearch/tasks/CancellableTask.java
@@ -85,9 +85,11 @@ protected void onCancelled() {}
     /**
      * Throws a {@link TaskCancelledException} if this task has been cancelled, otherwise does nothing.
      */
-    public final synchronized void ensureNotCancelled() {
+    public final void ensureNotCancelled() {
         if (isCancelled()) {
-            throw getTaskCancelledException();
+            synchronized (this) {
+                throw getTaskCancelledException();
+            }
         }
     }
 

From 8acfebeb0840e228fa35c3ce1566728a07b1b452 Mon Sep 17 00:00:00 2001
From: Niels Bauman <33722607+nielsbauman@users.noreply.github.com>
Date: Fri, 12 Jan 2024 12:36:09 +0100
Subject: [PATCH 69/75] Cleanup `OperationModeUpdateTask` (#104265)

The `hashCode` implementation called `super.hashCode`, which effectively (and incorrectly) called `Object.hashCode`. The hash code didn't seem to be used anywhere, so removing it made more sense than fixing it.

The `equals` implementation also looked a bit off: it included `priority` from the parent class, but not `timeout`. Rather than implementing a proper `equals` method (in both `OperationModeUpdateTask` and `ClusterStateUpdateTask`, its parent), I decided to just remove the method, as I could only find one real use case, which was inside a test.
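
For illustration (hypothetical class, not the one in this change), the
inconsistency looked roughly like this: `equals` compared fields while
`hashCode` mixed in the identity hash, so two equal tasks would almost never
hash to the same value.

    import java.util.Objects;

    // Sketch of the removed equals/hashCode mismatch.
    class ModeTaskSketch {
        final String ilmMode;

        ModeTaskSketch(String ilmMode) {
            this.ilmMode = ilmMode;
        }

        @Override
        public boolean equals(Object obj) {
            return obj instanceof ModeTaskSketch other && Objects.equals(ilmMode, other.ilmMode);
        }

        @Override
        public int hashCode() {
            // super.hashCode() resolves to Object.hashCode() (identity), so two instances
            // that are equals() practically never share a hash code, violating the contract.
            return Objects.hash(super.hashCode(), ilmMode);
        }
    }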
---
 docs/changelog/104265.yaml                    |  6 ++++++
 .../core/ilm/OperationModeUpdateTask.java     | 21 -------------------
 .../xpack/ilm/IndexLifecycleServiceTests.java |  7 +++----
 .../slm/SnapshotLifecycleServiceTests.java    | 11 ++++++----
 4 files changed, 16 insertions(+), 29 deletions(-)
 create mode 100644 docs/changelog/104265.yaml

diff --git a/docs/changelog/104265.yaml b/docs/changelog/104265.yaml
new file mode 100644
index 0000000000000..88c3d72ee81d0
--- /dev/null
+++ b/docs/changelog/104265.yaml
@@ -0,0 +1,6 @@
+pr: 104265
+summary: Remove `hashCode` and `equals` from `OperationModeUpdateTask`
+area: ILM+SLM
+type: bug
+issues:
+ - 100871
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/OperationModeUpdateTask.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/OperationModeUpdateTask.java
index c36d73e8d12d6..1072e6ee4c899 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/OperationModeUpdateTask.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/OperationModeUpdateTask.java
@@ -18,8 +18,6 @@
 import org.elasticsearch.common.Priority;
 import org.elasticsearch.core.Nullable;
 
-import java.util.Objects;
-
 import static org.elasticsearch.xpack.core.ilm.LifecycleOperationMetadata.currentILMMode;
 import static org.elasticsearch.xpack.core.ilm.LifecycleOperationMetadata.currentSLMMode;
 
@@ -157,23 +155,4 @@ public void clusterStateProcessed(ClusterState oldState, ClusterState newState)
             logger.info("SLM operation mode updated to {}", slmMode);
         }
     }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(super.hashCode(), ilmMode, slmMode);
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (obj == null) {
-            return false;
-        }
-        if (obj.getClass() != getClass()) {
-            return false;
-        }
-        OperationModeUpdateTask other = (OperationModeUpdateTask) obj;
-        return Objects.equals(priority(), other.priority())
-            && Objects.equals(ilmMode, other.ilmMode)
-            && Objects.equals(slmMode, other.slmMode);
-    }
 }
diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java
index f47fc38206183..dd1e2bb9d8dd7 100644
--- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java
+++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java
@@ -282,13 +282,12 @@ private void verifyCanStopWithStep(String stoppableStep) {
         ClusterChangedEvent event = new ClusterChangedEvent("_source", currentState, ClusterState.EMPTY_STATE);
         SetOnce<Boolean> changedOperationMode = new SetOnce<>();
         doAnswer(invocationOnMock -> {
+            OperationModeUpdateTask task = (OperationModeUpdateTask) invocationOnMock.getArguments()[1];
+            assertEquals(task.getILMOperationMode(), OperationMode.STOPPED);
             changedOperationMode.set(true);
             return null;
         }).when(clusterService)
-            .submitUnbatchedStateUpdateTask(
-                eq("ilm_operation_mode_update[stopped]"),
-                eq(OperationModeUpdateTask.ilmMode(OperationMode.STOPPED))
-            );
+            .submitUnbatchedStateUpdateTask(eq("ilm_operation_mode_update[stopped]"), any(OperationModeUpdateTask.class));
         indexLifecycleService.applyClusterState(event);
         indexLifecycleService.triggerPolicies(currentState, true);
         assertTrue(changedOperationMode.get());
diff --git a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleServiceTests.java b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleServiceTests.java
index 3541edfa20c93..9bbb08e89166e 100644
--- a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleServiceTests.java
+++ b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleServiceTests.java
@@ -21,6 +21,7 @@
 import org.elasticsearch.cluster.service.ClusterApplierService;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.cluster.service.MasterService;
+import org.elasticsearch.common.Priority;
 import org.elasticsearch.common.scheduler.SchedulerEngine;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Settings;
@@ -454,13 +455,13 @@ public void testStoppedPriority() {
                 )
             )
         );
-        final SetOnce<ClusterStateUpdateTask> task = new SetOnce<>();
+        final SetOnce<OperationModeUpdateTask> task = new SetOnce<>();
         ClusterService fakeService = new ClusterService(Settings.EMPTY, clusterSettings, threadPool, null) {
             @Override
             public void submitUnbatchedStateUpdateTask(String source, ClusterStateUpdateTask updateTask) {
                 logger.info("--> got task: [source: {}]: {}", source, updateTask);
-                if (updateTask instanceof OperationModeUpdateTask) {
-                    task.set(updateTask);
+                if (updateTask instanceof OperationModeUpdateTask operationModeUpdateTask) {
+                    task.set(operationModeUpdateTask);
                 }
             }
         };
@@ -476,7 +477,9 @@ public void submitUnbatchedStateUpdateTask(String source, ClusterStateUpdateTask
             true
         );
         service.clusterChanged(new ClusterChangedEvent("blah", state, ClusterState.EMPTY_STATE));
-        assertThat(task.get(), equalTo(OperationModeUpdateTask.slmMode(OperationMode.STOPPED)));
+        assertEquals(task.get().priority(), Priority.IMMEDIATE);
+        assertNull(task.get().getILMOperationMode());
+        assertEquals(task.get().getSLMOperationMode(), OperationMode.STOPPED);
         threadPool.shutdownNow();
     }
 

From 8a159b74bf9cca019316c1342d4d491b7079057e Mon Sep 17 00:00:00 2001
From: Armin Braun 
Date: Fri, 12 Jan 2024 13:39:04 +0100
Subject: [PATCH 70/75] Chunk BulkResponse on REST layer (#104310)

If there is a large number of failures, these responses can grow very large.
Chunking them at least avoids the O(n) memory cost of serializing the whole
response at once when sending it out over the REST layer. Even without
failures, large REST responses can grow to a size that exceeds what can be
written to the channel right away, and they can take very visible time to
serialize (~7% of all coordinating-node CPU time during ingest for the
http_logs Rally track!). Better to smooth out the cost as write capacity
becomes available.
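
To make the shape of the change concrete, here is a plain-Java sketch of the
chunking idea (hypothetical types, not the real `ChunkedToXContentObject`
API): the response is rendered as a lazy sequence of small fragments (header,
one fragment per item, footer), so serialization cost is paid incrementally
as the channel accepts writes instead of materializing one large buffer up
front.

    import java.util.Iterator;
    import java.util.List;
    import java.util.NoSuchElementException;

    // Hypothetical sketch of chunked response rendering.
    class ChunkedBulkSketch {
        record ItemResult(String id, boolean failed) {}

        static Iterator<String> toChunks(long tookMillis, List<ItemResult> items) {
            return new Iterator<>() {
                private int next = 0; // 0 = header, 1..items.size() = items, then footer

                @Override
                public boolean hasNext() {
                    return next <= items.size() + 1;
                }

                @Override
                public String next() {
                    if (hasNext() == false) {
                        throw new NoSuchElementException();
                    }
                    int i = next++;
                    if (i == 0) {
                        return "{\"took\":" + tookMillis + ",\"items\":[";
                    }
                    if (i == items.size() + 1) {
                        return "]}";
                    }
                    ItemResult item = items.get(i - 1);
                    return (i == 1 ? "" : ",") + "{\"id\":\"" + item.id() + "\",\"failed\":" + item.failed() + "}";
                }
            };
        }
    }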
---
 .../action/bulk/BulkResponse.java             | 36 +++++++++----------
 .../rest/action/document/RestBulkAction.java  |  4 +--
 .../ingest/RestSimulateIngestAction.java      |  7 ++--
 .../action/bulk/BulkResponseTests.java        |  8 ++++-
 4 files changed, 28 insertions(+), 27 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java
index 0ce472520a4fd..2065a31ce5566 100644
--- a/server/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java
+++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java
@@ -12,9 +12,9 @@
 import org.elasticsearch.common.collect.Iterators;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ChunkedToXContentObject;
 import org.elasticsearch.core.TimeValue;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.ToXContent;
 import org.elasticsearch.xcontent.XContentParser;
 
 import java.io.IOException;
@@ -31,7 +31,7 @@
  * bulk requests. Each item holds the index/type/id is operated on, and if it failed or not (with the
  * failure message).
  */
-public class BulkResponse extends ActionResponse implements Iterable<BulkItemResponse>, ToXContentObject {
+public class BulkResponse extends ActionResponse implements Iterable<BulkItemResponse>, ChunkedToXContentObject {
 
     private static final String ITEMS = "items";
     private static final String ERRORS = "errors";
@@ -133,23 +133,6 @@ public void writeTo(StreamOutput out) throws IOException {
         out.writeZLong(ingestTookInMillis);
     }
 
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        builder.field(ERRORS, hasFailures());
-        builder.field(TOOK, tookInMillis);
-        if (ingestTookInMillis != BulkResponse.NO_INGEST_TOOK) {
-            builder.field(INGEST_TOOK, ingestTookInMillis);
-        }
-        builder.startArray(ITEMS);
-        for (BulkItemResponse item : this) {
-            item.toXContent(builder, params);
-        }
-        builder.endArray();
-        builder.endObject();
-        return builder;
-    }
-
     public static BulkResponse fromXContent(XContentParser parser) throws IOException {
         XContentParser.Token token = parser.nextToken();
         ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser);
@@ -184,4 +167,17 @@ public static BulkResponse fromXContent(XContentParser parser) throws IOExceptio
         }
         return new BulkResponse(items.toArray(new BulkItemResponse[items.size()]), took, ingestTook);
     }
+
+    @Override
+    public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
+        return Iterators.concat(Iterators.single((builder, p) -> {
+            builder.startObject();
+            builder.field(ERRORS, hasFailures());
+            builder.field(TOOK, tookInMillis);
+            if (ingestTookInMillis != BulkResponse.NO_INGEST_TOOK) {
+                builder.field(INGEST_TOOK, ingestTookInMillis);
+            }
+            return builder.startArray(ITEMS);
+        }), Iterators.forArray(responses), Iterators.single((builder, p) -> builder.endArray().endObject()));
+    }
 }
diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java
index 7bfac46495b23..83a7728b82a4a 100644
--- a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java
+++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java
@@ -19,7 +19,7 @@
 import org.elasticsearch.rest.RestRequest;
 import org.elasticsearch.rest.Scope;
 import org.elasticsearch.rest.ServerlessScope;
-import org.elasticsearch.rest.action.RestToXContentListener;
+import org.elasticsearch.rest.action.RestRefCountedChunkedToXContentListener;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
 
 import java.io.IOException;
@@ -95,7 +95,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC
             request.getRestApiVersion()
         );
 
-        return channel -> client.bulk(bulkRequest, new RestToXContentListener<>(channel));
+        return channel -> client.bulk(bulkRequest, new RestRefCountedChunkedToXContentListener<>(channel));
     }
 
     @Override
diff --git a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulateIngestAction.java b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulateIngestAction.java
index e0d9dd95206cf..2c9b84f78636a 100644
--- a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulateIngestAction.java
+++ b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulateIngestAction.java
@@ -26,7 +26,7 @@
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.rest.Scope;
 import org.elasticsearch.rest.ServerlessScope;
-import org.elasticsearch.rest.action.RestToXContentListener;
+import org.elasticsearch.rest.action.RestBuilderListener;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
 import org.elasticsearch.xcontent.ToXContent;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -140,7 +140,7 @@ static BytesReference convertToBulkRequestXContentBytes(Map sour
      * simulate-style xcontent.
      * Non-private for unit testing
      */
-    static class SimulateIngestRestToXContentListener extends RestToXContentListener<BulkResponse> {
+    static class SimulateIngestRestToXContentListener extends RestBuilderListener<BulkResponse> {
 
         SimulateIngestRestToXContentListener(RestChannel channel) {
             super(channel);
@@ -150,8 +150,7 @@ static class SimulateIngestRestToXContentListener extends RestToXContentListener
         public RestResponse buildResponse(BulkResponse response, XContentBuilder builder) throws Exception {
             assert response.isFragment() == false;
             toXContent(response, builder, channel.request());
-            RestStatus restStatus = statusFunction.apply(response);
-            return new RestResponse(restStatus, builder);
+            return new RestResponse(RestStatus.OK, builder);
         }
 
         private static void toXContent(BulkResponse response, XContentBuilder builder, ToXContent.Params params) throws IOException {
diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkResponseTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkResponseTests.java
index 5a1c7f1572e23..c1cd88e0864a4 100644
--- a/server/src/test/java/org/elasticsearch/action/bulk/BulkResponseTests.java
+++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkResponseTests.java
@@ -16,6 +16,7 @@
 import org.elasticsearch.action.index.IndexResponseTests;
 import org.elasticsearch.action.update.UpdateResponseTests;
 import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.xcontent.ChunkedToXContent;
 import org.elasticsearch.core.Tuple;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xcontent.ToXContent;
@@ -68,7 +69,12 @@ public void testToAndFromXContent() throws IOException {
         }
 
         BulkResponse bulkResponse = new BulkResponse(bulkItems, took, ingestTook);
-        BytesReference originalBytes = toShuffledXContent(bulkResponse, xContentType, ToXContent.EMPTY_PARAMS, humanReadable);
+        BytesReference originalBytes = toShuffledXContent(
+            ChunkedToXContent.wrapAsToXContent(bulkResponse),
+            xContentType,
+            ToXContent.EMPTY_PARAMS,
+            humanReadable
+        );
 
         BulkResponse parsedBulkResponse;
         try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {

From c22ff0659c5f4c91cd4994db2e4a7ae961acadbd Mon Sep 17 00:00:00 2001
From: Nik Everett 
Date: Fri, 12 Jan 2024 07:58:00 -0500
Subject: [PATCH 71/75] ESQL: Test all operators with breaker (#104267)

This modifies the operator tests to *always* test with a breaker, without
the ability to opt out. Previously three operations opted out:
1. mv_expand never threw exceptions even though it should. It was using
   the block factory of the test blocks, which didn't have a breaker (see
   the sketch after this list). I modified the test blocks to take a
   breaker so now it properly breaks.
2. Reading values was throwing strange exceptions when I first wrote
   these tests and I didn't have time to get it to work. I don't 100%
   recall what those exceptions were, but they seem to be gone now. Good
   fairies?
3. The "project" operator doesn't allocate much of anything - it just
   drops or shifts blocks around. But the work I did to move the test
   blocks under the limit makes it so the test itself can throw. That's
   good enough for this.
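
The sketch referenced in point 1 (a hypothetical class, not `MockBigArrays`
or the real `CircuitBreaker`): every allocation is accounted against a byte
limit and trips an exception once the limit is exceeded, which is why block
factories used by test copies must also be wired to a breaker - an
unaccounted factory simply never trips.

    // Hypothetical sketch of a byte-accounting breaker.
    class ByteLimitBreaker {
        private final long limitBytes;
        private long usedBytes;

        ByteLimitBreaker(long limitBytes) {
            this.limitBytes = limitBytes;
        }

        // Reserve bytes for an allocation, or throw once the reservation would exceed the limit.
        synchronized void addEstimateBytesAndMaybeBreak(long bytes, String label) {
            if (usedBytes + bytes > limitBytes) {
                throw new IllegalStateException(
                    "[" + label + "] would use " + (usedBytes + bytes) + " bytes, over the limit of " + limitBytes
                );
            }
            usedBytes += bytes;
        }

        // Release bytes when the allocation is freed; tests assert usage returns to zero.
        synchronized void release(long bytes) {
            usedBytes -= bytes;
        }

        synchronized long used() {
            return usedBytes;
        }
    }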
---
 .../operator/CannedSourceOperator.java        | 27 ++++++++++++-------
 .../compute/operator/LimitOperatorTests.java  |  7 -----
 .../operator/MvExpandOperatorTests.java       |  7 -----
 .../compute/operator/OperatorTestCase.java    | 16 +++++------
 .../operator/ProjectOperatorTests.java        |  7 -----
 5 files changed, 23 insertions(+), 41 deletions(-)

diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java
index 01f51b32edb1d..4d5a6260ed02d 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java
@@ -79,18 +79,25 @@ public static Page mergePages(List pages) {
      * Make a deep copy of some pages. Useful so that when the originals are
      * released the copies are still live.
      */
-    public static List<Page> deepCopyOf(List<Page> pages) {
+    public static List<Page> deepCopyOf(BlockFactory blockFactory, List<Page> pages) {
         List<Page> out = new ArrayList<>(pages.size());
-        BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance();
-        for (Page p : pages) {
-            Block[] blocks = new Block[p.getBlockCount()];
-            for (int b = 0; b < blocks.length; b++) {
-                Block orig = p.getBlock(b);
-                Block.Builder builder = orig.elementType().newBlockBuilder(p.getPositionCount(), blockFactory);
-                builder.copyFrom(orig, 0, p.getPositionCount());
-                blocks[b] = builder.build();
+        try {
+            for (Page p : pages) {
+                Block[] blocks = new Block[p.getBlockCount()];
+                for (int b = 0; b < blocks.length; b++) {
+                    Block orig = p.getBlock(b);
+                    try (Block.Builder builder = orig.elementType().newBlockBuilder(p.getPositionCount(), blockFactory)) {
+                        builder.copyFrom(orig, 0, p.getPositionCount());
+                        blocks[b] = builder.build();
+                    }
+                }
+                out.add(new Page(blocks));
+            }
+        } finally {
+            if (pages.size() != out.size()) {
+                // failed to copy all the pages, we're bubbling out an exception. So we have to close the copy.
+                Releasables.closeExpectNoException(Releasables.wrap(() -> Iterators.map(out.iterator(), p -> p::releaseBlocks)));
             }
-            out.add(new Page(blocks));
         }
         return out;
     }
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java
index e366646ecd0f5..d2db9c7b48da6 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java
@@ -7,7 +7,6 @@
 
 package org.elasticsearch.compute.operator;
 
-import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.compute.data.BasicBlockTests;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BlockFactory;
@@ -49,12 +48,6 @@ protected void assertSimpleOutput(List input, List results) {
         assertThat(outputPositionCount, equalTo(Math.min(100, inputPositionCount)));
     }
 
-    @Override
-    protected ByteSizeValue enoughMemoryForSimple() {
-        assumeFalse("doesn't allocate, just filters", true);
-        return null;
-    }
-
     public void testStatus() {
         BlockFactory blockFactory = driverContext().blockFactory();
         LimitOperator op = simple().get(driverContext());
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java
index 165e5b80b9a58..02517e8fafe1a 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java
@@ -7,7 +7,6 @@
 
 package org.elasticsearch.compute.operator;
 
-import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BlockFactory;
 import org.elasticsearch.compute.data.ElementType;
@@ -199,12 +198,6 @@ protected void assertSimpleOutput(List input, List results) {
         assertThat(resultIter2.hasNext(), equalTo(false));
     }
 
-    @Override
-    protected ByteSizeValue enoughMemoryForSimple() {
-        assumeFalse("doesn't throw in tests but probably should", true);
-        return ByteSizeValue.ofBytes(1);
-    }
-
     public void testNoopStatus() {
         BlockFactory blockFactory = blockFactory();
         MvExpandOperator op = new MvExpandOperator(0, randomIntBetween(1, 1000));
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java
index 0890ba669f0a2..68a2bde0c2f6c 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java
@@ -41,6 +41,7 @@
 
 import static org.hamcrest.Matchers.empty;
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.in;
 
 /**
  * Base tests for {@link Operator}s that are not {@link SourceOperator} or {@link SinkOperator}.
@@ -97,16 +98,10 @@ public final void testSimpleCircuitBreaking() {
         DriverContext inputFactoryContext = driverContext();
         List<Page> input = CannedSourceOperator.collectPages(simpleInput(inputFactoryContext.blockFactory(), between(1_000, 10_000)));
         try {
-            ByteSizeValue limit = BreakerTestUtil.findBreakerLimit(
-                memoryLimitForSimple,
-                l -> runWithLimit(simple, CannedSourceOperator.deepCopyOf(input), l)
-            );
+            ByteSizeValue limit = BreakerTestUtil.findBreakerLimit(memoryLimitForSimple, l -> runWithLimit(simple, input, l));
             ByteSizeValue testWithSize = ByteSizeValue.ofBytes(randomLongBetween(0, limit.getBytes()));
             logger.info("testing with {} against a limit of {}", testWithSize, limit);
-            Exception e = expectThrows(
-                CircuitBreakingException.class,
-                () -> runWithLimit(simple, CannedSourceOperator.deepCopyOf(input), testWithSize)
-            );
+            Exception e = expectThrows(CircuitBreakingException.class, () -> runWithLimit(simple, input, testWithSize));
             assertThat(e.getMessage(), equalTo(MockBigArrays.ERROR_MESSAGE));
         } finally {
             Releasables.closeExpectNoException(Releasables.wrap(() -> Iterators.map(input.iterator(), p -> p::releaseBlocks)));
@@ -119,15 +114,16 @@ private void runWithLimit(Operator.OperatorFactory factory, List input, By
         CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST);
         BlockFactory blockFactory = BlockFactory.getInstance(breaker, bigArrays);
         DriverContext driverContext = new DriverContext(bigArrays, blockFactory);
+        List<Page> localInput = CannedSourceOperator.deepCopyOf(blockFactory, input);
         boolean driverStarted = false;
         try {
             var operator = factory.get(driverContext);
             driverStarted = true;
-            drive(operator, input.iterator(), driverContext);
+            drive(operator, localInput.iterator(), driverContext);
         } finally {
             if (driverStarted == false) {
                 // if drive hasn't even started then we need to release the input pages manually
-                Releasables.closeExpectNoException(Releasables.wrap(() -> Iterators.map(input.iterator(), p -> p::releaseBlocks)));
+                Releasables.closeExpectNoException(Releasables.wrap(() -> Iterators.map(localInput.iterator(), p -> p::releaseBlocks)));
             }
             assertThat(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST).getUsed(), equalTo(0L));
         }
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java
index 572657c7c8226..26b9b16d7b24e 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java
@@ -7,7 +7,6 @@
 
 package org.elasticsearch.compute.operator;
 
-import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BlockFactory;
 import org.elasticsearch.compute.data.IntBlock;
@@ -95,12 +94,6 @@ protected void assertSimpleOutput(List input, List results) {
         assertThat(total, equalTo(input.stream().mapToInt(Page::getPositionCount).sum()));
     }
 
-    @Override
-    protected ByteSizeValue enoughMemoryForSimple() {
-        assumeTrue("doesn't allocate", false);
-        return null;
-    }
-
     public void testDescriptionOfMany() {
         ProjectOperator.ProjectOperatorFactory factory = new ProjectOperator.ProjectOperatorFactory(
             IntStream.range(0, 100).boxed().toList()

From bb1a9874d0af66032de2e4cc09331c3942ec7a69 Mon Sep 17 00:00:00 2001
From: Nik Everett 
Date: Fri, 12 Jan 2024 08:08:32 -0500
Subject: [PATCH 72/75] Disable ESQL async test

It's failing due to some timing issues. I'll build a fix later today but
let's get it out of everyone's way for now.

Tracked by #104294
---
 .../qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java     | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java
index 312175c92246a..be2bfcb8a2787 100644
--- a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java
+++ b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java
@@ -9,6 +9,7 @@
 
 import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
 
+import org.apache.lucene.tests.util.LuceneTestCase;
 import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
 import org.elasticsearch.test.rest.yaml.section.ApiCallSection;
 import org.elasticsearch.test.rest.yaml.section.DoSection;
@@ -20,6 +21,7 @@
 /**
  * Run the ESQL yaml tests async and then fetch the results with a long wait time.
  */
+@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104294")
 public class EsqlClientYamlAsyncSubmitAndFetchIT extends AbstractEsqlClientYamlIT {
     public EsqlClientYamlAsyncSubmitAndFetchIT(final ClientYamlTestCandidate testCandidate) {
         super(testCandidate);

From ce94c10a358b67f86ecd5e0c48401f22d4df7c47 Mon Sep 17 00:00:00 2001
From: Pooya Salehi 
Date: Fri, 12 Jan 2024 14:37:28 +0100
Subject: [PATCH 73/75] Consider old version map in IndexBufferRAMBytesUsed
 (#104122)

Separated from https://github.com/elastic/elasticsearch/pull/103979.
Currently, `InternalEngine#getIndexBufferRAMBytesUsed` considers
`versionMap.ramBytesUsedForRefresh()` as the refresh-related memory
usage of the version map. However, `versionMap.ramBytesUsedForRefresh()`
only considers the `current` map. This value (as part of
`shard.getIndexBufferRAMBytesUsed()`) is used in the
`IndexingMemoryController` as the current usage, and then the amount of
heap currently being freed (which for the version map is the `old` map) is
[subtracted from it](https://github.com/elastic/elasticsearch/blob/main/server/src/main/java/org/elasticsearch/indices/IndexingMemoryController.java#L372).
I think we should consider both `old` and `current` in
`getIndexBufferRAMBytesUsed`.
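
A small sketch of the accounting line this change draws (hypothetical fields,
not the actual `LiveVersionMap` internals): `current` grows with new indexing
and is freed by the next refresh, while `old` holds the entries an in-flight
refresh is already reclaiming.

    // Hypothetical sketch of the two numbers the engine now distinguishes.
    class VersionMapAccountingSketch {
        long currentMapBytes; // freed by triggering a new refresh
        long oldMapBytes;     // freed when the refresh already in flight completes

        // Total heap held on behalf of refresh; what getIndexBufferRAMBytesUsed should report.
        long ramBytesUsedForRefresh() {
            return currentMapBytes + oldMapBytes;
        }

        // Heap a *new* refresh could reclaim right now; used to decide refresh vs. writing a segment.
        long reclaimableRefreshRamBytes() {
            return currentMapBytes;
        }
    }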
---
 docs/changelog/104122.yaml                    |  5 +++++
 .../elasticsearch/index/engine/Engine.java    |  3 ++-
 .../index/engine/InternalEngine.java          |  4 ++--
 .../index/engine/LiveVersionMap.java          | 12 +++++++++--
 .../index/engine/LiveVersionMapTests.java     | 21 +++++++++++++++++++
 5 files changed, 40 insertions(+), 5 deletions(-)
 create mode 100644 docs/changelog/104122.yaml

diff --git a/docs/changelog/104122.yaml b/docs/changelog/104122.yaml
new file mode 100644
index 0000000000000..a88d7499bd44e
--- /dev/null
+++ b/docs/changelog/104122.yaml
@@ -0,0 +1,5 @@
+pr: 104122
+summary: Consider currently refreshing data in the memory usage of refresh
+area: Engine
+type: bug
+issues: []
diff --git a/server/src/main/java/org/elasticsearch/index/engine/Engine.java b/server/src/main/java/org/elasticsearch/index/engine/Engine.java
index 9b9cf8ad35c04..3849095a94e6e 100644
--- a/server/src/main/java/org/elasticsearch/index/engine/Engine.java
+++ b/server/src/main/java/org/elasticsearch/index/engine/Engine.java
@@ -1007,7 +1007,8 @@ protected void writerSegmentStats(SegmentsStats stats) {
         stats.addIndexWriterMemoryInBytes(0);
     }
 
-    /** How much heap is used that would be freed by a refresh.  Note that this may throw {@link AlreadyClosedException}. */
+    /** How much heap is used that would be freed by a refresh. This includes both the current memory being freed and any remaining
+     * memory usage that could be freed, e.g., by refreshing. Note that this may throw {@link AlreadyClosedException}. */
     public abstract long getIndexBufferRAMBytesUsed();
 
     final Segment[] getSegmentInfo(SegmentInfos lastCommittedSegmentInfos) {
diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
index 8affee4330074..65834a8c011f2 100644
--- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
+++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
@@ -2093,14 +2093,14 @@ protected final RefreshResult refresh(String source, SearcherScope scope, boolea
 
     @Override
     public void writeIndexingBuffer() throws IOException {
-        final long versionMapBytesUsed = versionMap.ramBytesUsedForRefresh();
+        final long reclaimableVersionMapBytes = versionMap.reclaimableRefreshRamBytes();
         // Only count bytes that are not already being written to disk. Note: this number may be negative at times if these two metrics get
         // updated concurrently. It's fine as it's only being used as a heuristic to decide on a full refresh vs. writing a single segment.
         // TODO: it might be more relevant to use the RAM usage of the largest DWPT as opposed to the overall RAM usage? Can we get this
         // exposed in Lucene?
         final long indexWriterBytesUsed = indexWriter.ramBytesUsed() - indexWriter.getFlushingBytes();
 
-        if (versionMapBytesUsed >= indexWriterBytesUsed) {
+        if (reclaimableVersionMapBytes >= indexWriterBytesUsed) {
             // This method expects to reclaim memory quickly, so if the version map is using more memory than the IndexWriter buffer then we
             // do a refresh, which is the only way to reclaim memory from the version map. IndexWriter#flushNextBuffer has similar logic: if
             // pending deletes occupy more than half of RAMBufferSizeMB then deletes are applied too.
diff --git a/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java b/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java
index 1cee2a90ec3f1..7cc1b92b43c43 100644
--- a/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java
+++ b/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java
@@ -476,10 +476,18 @@ public long ramBytesUsed() {
     }
 
     /**
-     * Returns how much RAM would be freed up by refreshing. This is the RAM usage of the current version map. It doesn't include tombstones
-     * since they don't get cleared on refresh, nor the old version map that is being reclaimed.
+     * Returns how much RAM is used by refresh. This is the RAM usage of the current and old version maps.
      */
     long ramBytesUsedForRefresh() {
+        return maps.ramBytesUsed();
+    }
+
+    /**
+     * Returns how much RAM could be reclaimed from the version map. This is the RAM usage of the current version map, and could be
+     * reclaimed by refreshing. It doesn't include tombstones since they don't get cleared on refresh, nor the old version map that
+     * is being reclaimed.
+     */
+    long reclaimableRefreshRamBytes() {
         return maps.current.ramBytesUsed.get();
     }
 
diff --git a/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java b/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java
index 5ca7aadc35fa7..8d357413b09cd 100644
--- a/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java
+++ b/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java
@@ -495,4 +495,25 @@ public void testVersionLookupRamBytesUsed() {
             .sum();
         assertEquals(actualRamBytesUsed, vl.ramBytesUsed());
     }
+
+    public void testVersionMapReclaimableRamBytes() throws IOException {
+        LiveVersionMap map = new LiveVersionMap();
+        assertEquals(map.ramBytesUsedForRefresh(), 0L);
+        assertEquals(map.reclaimableRefreshRamBytes(), 0L);
+        IntStream.range(0, randomIntBetween(10, 100)).forEach(i -> {
+            BytesRefBuilder uid = new BytesRefBuilder();
+            uid.copyChars(TestUtil.randomSimpleString(random(), 10, 20));
+            try (Releasable r = map.acquireLock(uid.toBytesRef())) {
+                map.putIndexUnderLock(uid.toBytesRef(), randomIndexVersionValue());
+            }
+        });
+        assertThat(map.reclaimableRefreshRamBytes(), greaterThan(0L));
+        assertEquals(map.reclaimableRefreshRamBytes(), map.ramBytesUsedForRefresh());
+        map.beforeRefresh();
+        assertEquals(map.reclaimableRefreshRamBytes(), 0L);
+        assertThat(map.ramBytesUsedForRefresh(), greaterThan(0L));
+        map.afterRefresh(randomBoolean());
+        assertEquals(map.reclaimableRefreshRamBytes(), 0L);
+        assertEquals(map.ramBytesUsedForRefresh(), 0L);
+    }
 }

From 063fc26a20beadddad39d0d217ea26e0139a5052 Mon Sep 17 00:00:00 2001
From: Moritz Mack 
Date: Fri, 12 Jan 2024 14:56:32 +0100
Subject: [PATCH 74/75] Temporarily tolerate
 tracing.apm.agent.global_labels.XYZ settings  (#104315)

---
 .../telemetry/apm/internal/APMAgentSettings.java              | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java
index 0ee13dae70740..12e81e7ae78e1 100644
--- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java
+++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java
@@ -227,6 +227,10 @@ public void setAgentSetting(String key, String value) {
             final String key = parts[parts.length - 1];
             return new Setting<>(qualifiedKey, "", (value) -> {
                 if (qualifiedKey.equals("_na_") == false && PERMITTED_AGENT_KEYS.contains(key) == false) {
+                    // TODO figure out why those settings are kept, these should be reformatted / removed by now
+                    if (key.startsWith("global_labels.")) {
+                        return value;
+                    }
                     throw new IllegalArgumentException("Configuration [" + qualifiedKey + "] is either prohibited or unknown.");
                 }
                 return value;

From 5ca63c5b501a8c9d16eda6a3ff57c9ed0597cb3a Mon Sep 17 00:00:00 2001
From: Nik Everett 
Date: Fri, 12 Jan 2024 09:03:07 -0500
Subject: [PATCH 75/75] ESQL: Remove SearchContext from Operators (#104290)

This removes the *giant* `SearchContext` class from our `Operator`s.
It's just *so* **so** ***so*** big. It's hard to test with: you can do it,
but it takes a lot of crazy mocking and a lot of reading.

The replacement is two interfaces. The first, in the compute engine, is
called `ShardContext`. It contains the methods that are required to power
all of the `LuceneSourceOperator`s: it has the `IndexSearcher`, can build
sorts, and has a few identifiers.

The other interface is also called `ShardContext`, but this one lives in
esql itself, and it has all of the things needed to power
`EsPhysicalOperationProviders` - mostly stuff to configure the
value-fetching operator and build queries.

There exists a "production" implementation of these interfaces in
`EsPhysicalOperationProviders` and a test implementation in
`LuceneSourceOperatorTests`. It's super easy to plug in non-production
implementations that don't drag in 45% of all of Elasticsearch.
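
As a rough sketch of the shape of the compute-engine contract (method names
are illustrative, not the exact interface), the point is that a test can
implement it in a handful of lines instead of mocking `SearchContext`:

    import org.apache.lucene.search.IndexSearcher;

    // Illustrative sketch, not the exact ShardContext interface.
    interface ShardContextSketch {
        // Position of this shard in the local list of shards the query targets.
        int index();

        // Searcher used by the Lucene source operators to build weights and scorers.
        IndexSearcher searcher();

        // Stable "index:shard" style identifier, used for status reporting.
        String shardIdentifier();
    }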
---
 .../compute/lucene/BlockReaderFactories.java  |  74 ------
 .../compute/lucene/LuceneCountOperator.java   |   7 +-
 .../compute/lucene/LuceneOperator.java        |  27 +--
 .../compute/lucene/LuceneSlice.java           |   4 +-
 .../compute/lucene/LuceneSliceQueue.java      |  16 +-
 .../compute/lucene/LuceneSourceOperator.java  |   9 +-
 .../lucene/LuceneTopNSourceOperator.java      |  27 +--
 .../compute/lucene/ShardContext.java          |  42 ++++
 .../elasticsearch/compute/OperatorTests.java  |   6 +-
 .../lucene/LuceneCountOperatorTests.java      |   5 +-
 .../lucene/LuceneSourceOperatorTests.java     |  92 ++++----
 .../lucene/LuceneTopNSourceOperatorTests.java |  43 ++--
 .../ValuesSourceReaderOperatorTests.java      |  10 +-
 .../esql/enrich/EnrichLookupService.java      |   8 +-
 .../planner/EsPhysicalOperationProviders.java | 212 +++++++++++++-----
 .../esql/planner/LocalExecutionPlanner.java   |  16 +-
 .../xpack/esql/plugin/ComputeService.java     |  13 +-
 .../planner/LocalExecutionPlannerTests.java   |  16 +-
 18 files changed, 332 insertions(+), 295 deletions(-)
 delete mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/BlockReaderFactories.java
 create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ShardContext.java

diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/BlockReaderFactories.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/BlockReaderFactories.java
deleted file mode 100644
index 95b3ee9c10ff0..0000000000000
--- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/BlockReaderFactories.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-
-package org.elasticsearch.compute.lucene;
-
-import org.elasticsearch.common.logging.HeaderWarning;
-import org.elasticsearch.index.mapper.BlockLoader;
-import org.elasticsearch.index.mapper.FieldNamesFieldMapper;
-import org.elasticsearch.index.mapper.MappedFieldType;
-import org.elasticsearch.index.query.SearchExecutionContext;
-import org.elasticsearch.search.lookup.SearchLookup;
-
-import java.util.Set;
-
-/**
- * Resolves *how* ESQL loads field values.
- */
-public final class BlockReaderFactories {
-    private BlockReaderFactories() {}
-
-    /**
-     * Resolves *how* ESQL loads field values.
-     * @param ctx a search context for the index we're loading field from
-     * @param fieldName the name of the field to load
-     * @param asUnsupportedSource should the field be loaded as "unsupported"?
-     *                            These will always have {@code null} values
-     */
-    public static BlockLoader loader(SearchExecutionContext ctx, String fieldName, boolean asUnsupportedSource) {
-        if (asUnsupportedSource) {
-            return BlockLoader.CONSTANT_NULLS;
-        }
-        MappedFieldType fieldType = ctx.getFieldType(fieldName);
-        if (fieldType == null) {
-            // the field does not exist in this context
-            return BlockLoader.CONSTANT_NULLS;
-        }
-        BlockLoader loader = fieldType.blockLoader(new MappedFieldType.BlockLoaderContext() {
-            @Override
-            public String indexName() {
-                return ctx.getFullyQualifiedIndex().getName();
-            }
-
-            @Override
-            public SearchLookup lookup() {
-                return ctx.lookup();
-            }
-
-            @Override
-            public Set sourcePaths(String name) {
-                return ctx.sourcePath(name);
-            }
-
-            @Override
-            public String parentField(String field) {
-                return ctx.parentPath(field);
-            }
-
-            @Override
-            public FieldNamesFieldMapper.FieldNamesFieldType fieldNames() {
-                return (FieldNamesFieldMapper.FieldNamesFieldType) ctx.lookup().fieldType(FieldNamesFieldMapper.NAME);
-            }
-        });
-        if (loader == null) {
-            HeaderWarning.addWarning("Field [{}] cannot be retrieved, it is unsupported or not indexed; returning null", fieldName);
-            return BlockLoader.CONSTANT_NULLS;
-        }
-
-        return loader;
-    }
-}
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java
index 4ed32d6552497..4dda5c16295fb 100644
--- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java
+++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java
@@ -20,7 +20,6 @@
 import org.elasticsearch.compute.operator.DriverContext;
 import org.elasticsearch.compute.operator.SourceOperator;
 import org.elasticsearch.core.Releasables;
-import org.elasticsearch.search.internal.SearchContext;
 
 import java.io.IOException;
 import java.io.UncheckedIOException;
@@ -49,8 +48,8 @@ public static class Factory implements LuceneOperator.Factory {
         private final LuceneSliceQueue sliceQueue;
 
         public Factory(
-            List<SearchContext> searchContexts,
-            Function<SearchContext, Query> queryFunction,
+            List<? extends ShardContext> contexts,
+            Function<ShardContext, Query> queryFunction,
             DataPartitioning dataPartitioning,
             int taskConcurrency,
             int limit
@@ -58,7 +57,7 @@ public Factory(
             this.limit = limit;
             this.dataPartitioning = dataPartitioning;
             var weightFunction = weightFunction(queryFunction, ScoreMode.COMPLETE_NO_SCORES);
-            this.sliceQueue = LuceneSliceQueue.create(searchContexts, weightFunction, dataPartitioning, taskConcurrency);
+            this.sliceQueue = LuceneSliceQueue.create(contexts, weightFunction, dataPartitioning, taskConcurrency);
             this.taskConcurrency = Math.min(sliceQueue.totalSlices(), taskConcurrency);
         }
 
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java
index 21b2a4cfaeb0b..1eeedd06d058d 100644
--- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java
+++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java
@@ -26,7 +26,6 @@
 import org.elasticsearch.compute.operator.SourceOperator;
 import org.elasticsearch.logging.LogManager;
 import org.elasticsearch.logging.Logger;
-import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.xcontent.XContentBuilder;
 
 import java.io.IOException;
@@ -90,11 +89,7 @@ LuceneScorer getCurrentOrLoadNextScorer() {
                     continue;
                 }
                 processedSlices++;
-                processedShards.add(
-                    currentSlice.searchContext().getSearchExecutionContext().getFullyQualifiedIndex().getName()
-                        + ":"
-                        + currentSlice.searchContext().getSearchExecutionContext().getShardId()
-                );
+                processedShards.add(currentSlice.shardContext().shardIdentifier());
             }
             final PartialLeafReaderContext partialLeaf = currentSlice.getLeaf(sliceIndex++);
             logger.trace("Starting {}", partialLeaf);
@@ -102,7 +97,7 @@ LuceneScorer getCurrentOrLoadNextScorer() {
             if (currentScorer == null || currentScorer.leafReaderContext() != leaf) {
                 final Weight weight = currentSlice.weight().get();
                 processedQueries.add(weight.getQuery());
-                currentScorer = new LuceneScorer(currentSlice.shardIndex(), currentSlice.searchContext(), weight, leaf);
+                currentScorer = new LuceneScorer(currentSlice.shardContext(), weight, leaf);
             }
             assert currentScorer.maxPosition <= partialLeaf.maxDoc() : currentScorer.maxPosition + ">" + partialLeaf.maxDoc();
             currentScorer.maxPosition = partialLeaf.maxDoc();
@@ -118,8 +113,7 @@ LuceneScorer getCurrentOrLoadNextScorer() {
      * Wraps a {@link BulkScorer} with shard information
      */
     static final class LuceneScorer {
-        private final int shardIndex;
-        private final SearchContext searchContext;
+        private final ShardContext shardContext;
         private final Weight weight;
         private final LeafReaderContext leafReaderContext;
 
@@ -128,9 +122,8 @@ static final class LuceneScorer {
         private int maxPosition;
         private Thread executingThread;
 
-        LuceneScorer(int shardIndex, SearchContext searchContext, Weight weight, LeafReaderContext leafReaderContext) {
-            this.shardIndex = shardIndex;
-            this.searchContext = searchContext;
+        LuceneScorer(ShardContext shardContext, Weight weight, LeafReaderContext leafReaderContext) {
+            this.shardContext = shardContext;
             this.weight = weight;
             this.leafReaderContext = leafReaderContext;
             reinitialize();
@@ -165,12 +158,8 @@ void markAsDone() {
             position = DocIdSetIterator.NO_MORE_DOCS;
         }
 
-        int shardIndex() {
-            return shardIndex;
-        }
-
-        SearchContext searchContext() {
-            return searchContext;
+        ShardContext shardContext() {
+            return shardContext;
         }
 
         Weight weight() {
@@ -377,7 +366,7 @@ public String toString() {
         }
     }
 
-    static Function<SearchContext, Weight> weightFunction(Function<SearchContext, Query> queryFunction, ScoreMode scoreMode) {
+    static Function<ShardContext, Weight> weightFunction(Function<ShardContext, Query> queryFunction, ScoreMode scoreMode) {
         return ctx -> {
             final var query = queryFunction.apply(ctx);
             final var searcher = ctx.searcher();
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSlice.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSlice.java
index c3fe03ae88bb3..716df6844e79f 100644
--- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSlice.java
+++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSlice.java
@@ -8,7 +8,6 @@
 package org.elasticsearch.compute.lucene;
 
 import org.apache.lucene.search.Weight;
-import org.elasticsearch.search.internal.SearchContext;
 
 import java.util.List;
 import java.util.function.Supplier;
@@ -16,8 +15,7 @@
 /**
  * Holds a list of multiple partial Lucene segments
  */
-public record LuceneSlice(int shardIndex, SearchContext searchContext, List<PartialLeafReaderContext> leaves, Supplier<Weight> weight) {
-
+public record LuceneSlice(ShardContext shardContext, List<PartialLeafReaderContext> leaves, Supplier<Weight> weight) {
     int numLeaves() {
         return leaves.size();
     }
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java
index faf3d6437282a..d0329174f2839 100644
--- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java
+++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java
@@ -12,7 +12,6 @@
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Weight;
 import org.elasticsearch.core.Nullable;
-import org.elasticsearch.search.internal.SearchContext;
 
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -48,24 +47,23 @@ public int totalSlices() {
     }
 
     public static LuceneSliceQueue create(
-        List<SearchContext> searchContexts,
-        Function<SearchContext, Weight> weightFunction,
+        List<? extends ShardContext> contexts,
+        Function<ShardContext, Weight> weightFunction,
         DataPartitioning dataPartitioning,
         int taskConcurrency
     ) {
         final List<LuceneSlice> slices = new ArrayList<>();
-        for (int shardIndex = 0; shardIndex < searchContexts.size(); shardIndex++) {
-            final SearchContext searchContext = searchContexts.get(shardIndex);
-            final List<LeafReaderContext> leafContexts = searchContext.searcher().getLeafContexts();
+        for (ShardContext ctx : contexts) {
+            final List<LeafReaderContext> leafContexts = ctx.searcher().getLeafContexts();
             List<List<PartialLeafReaderContext>> groups = switch (dataPartitioning) {
                 case SHARD -> Collections.singletonList(leafContexts.stream().map(PartialLeafReaderContext::new).toList());
                 case SEGMENT -> segmentSlices(leafContexts);
-                case DOC -> docSlices(searchContext.searcher().getIndexReader(), taskConcurrency);
+                case DOC -> docSlices(ctx.searcher().getIndexReader(), taskConcurrency);
             };
             final Weight[] cachedWeight = new Weight[1];
             final Supplier<Weight> weight = () -> {
                 if (cachedWeight[0] == null) {
-                    cachedWeight[0] = weightFunction.apply(searchContext);
+                    cachedWeight[0] = weightFunction.apply(ctx);
                 }
                 return cachedWeight[0];
             };
@@ -73,7 +71,7 @@ public static LuceneSliceQueue create(
                 weight.get(); // eagerly build Weight once
             }
             for (List<PartialLeafReaderContext> group : groups) {
-                slices.add(new LuceneSlice(shardIndex, searchContext, group, weight));
+                slices.add(new LuceneSlice(ctx, group, weight));
             }
         }
         return new LuceneSliceQueue(slices);
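
Note on the hunk above: create(...) hides each shard's Weight behind a caching Supplier so it is built at most once, lazily, and in some cases it is forced eagerly via weight.get(). A minimal, self-contained sketch of that memoizing-supplier pattern, with hypothetical names and not part of the Elasticsearch API:

    import java.util.function.Supplier;

    final class MemoizingSupplier<T> implements Supplier<T> {
        private final Supplier<T> delegate;
        private T cached; // single-slot cache, like the Weight[1] array above; not thread-safe

        MemoizingSupplier(Supplier<T> delegate) {
            this.delegate = delegate;
        }

        @Override
        public T get() {
            if (cached == null) {
                cached = delegate.get(); // computed on first use, reused afterwards
            }
            return cached;
        }
    }
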
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java
index b636e4aba8a5e..9d6e3f46d0e1e 100644
--- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java
+++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java
@@ -19,7 +19,6 @@
 import org.elasticsearch.compute.operator.DriverContext;
 import org.elasticsearch.compute.operator.SourceOperator;
 import org.elasticsearch.core.Releasables;
-import org.elasticsearch.search.internal.SearchContext;
 
 import java.io.IOException;
 import java.io.UncheckedIOException;
@@ -46,8 +45,8 @@ public static class Factory implements LuceneOperator.Factory {
         private final LuceneSliceQueue sliceQueue;
 
         public Factory(
-            List<SearchContext> searchContexts,
-            Function<SearchContext, Query> queryFunction,
+            List<? extends ShardContext> contexts,
+            Function<ShardContext, Query> queryFunction,
             DataPartitioning dataPartitioning,
             int taskConcurrency,
             int maxPageSize,
@@ -57,7 +56,7 @@ public Factory(
             this.limit = limit;
             this.dataPartitioning = dataPartitioning;
             var weightFunction = weightFunction(queryFunction, ScoreMode.COMPLETE_NO_SCORES);
-            this.sliceQueue = LuceneSliceQueue.create(searchContexts, weightFunction, dataPartitioning, taskConcurrency);
+            this.sliceQueue = LuceneSliceQueue.create(contexts, weightFunction, dataPartitioning, taskConcurrency);
             this.taskConcurrency = Math.min(sliceQueue.totalSlices(), taskConcurrency);
         }
 
@@ -149,7 +148,7 @@ public Page getOutput() {
                 IntBlock leaf = null;
                 IntVector docs = null;
                 try {
-                    shard = blockFactory.newConstantIntBlockWith(scorer.shardIndex(), currentPagePos);
+                    shard = blockFactory.newConstantIntBlockWith(scorer.shardContext().index(), currentPagePos);
                     leaf = blockFactory.newConstantIntBlockWith(scorer.leafReaderContext().ord, currentPagePos);
                     docs = docsBuilder.build();
                     docsBuilder = blockFactory.newIntVectorBuilder(Math.min(remainingDocs, maxPageSize));
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java
index 7f08c8ca66821..8cb9173adc197 100644
--- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java
+++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java
@@ -24,7 +24,6 @@
 import org.elasticsearch.compute.operator.DriverContext;
 import org.elasticsearch.compute.operator.SourceOperator;
 import org.elasticsearch.core.Releasables;
-import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.sort.SortAndFormats;
 import org.elasticsearch.search.sort.SortBuilder;
 
@@ -49,8 +48,8 @@ public static final class Factory implements LuceneOperator.Factory {
         private final LuceneSliceQueue sliceQueue;
 
         public Factory(
-            List<SearchContext> searchContexts,
-            Function<SearchContext, Query> queryFunction,
+            List<? extends ShardContext> contexts,
+            Function<ShardContext, Query> queryFunction,
             DataPartitioning dataPartitioning,
             int taskConcurrency,
             int maxPageSize,
@@ -62,7 +61,7 @@ public Factory(
             this.limit = limit;
             this.dataPartitioning = dataPartitioning;
             var weightFunction = weightFunction(queryFunction, ScoreMode.TOP_DOCS);
-            this.sliceQueue = LuceneSliceQueue.create(searchContexts, weightFunction, dataPartitioning, taskConcurrency);
+            this.sliceQueue = LuceneSliceQueue.create(contexts, weightFunction, dataPartitioning, taskConcurrency);
             this.taskConcurrency = Math.min(sliceQueue.totalSlices(), taskConcurrency);
         }
 
@@ -156,9 +155,9 @@ private Page collect() {
             return emit(true);
         }
         try {
-            if (perShardCollector == null || perShardCollector.shardIndex != scorer.shardIndex()) {
+            if (perShardCollector == null || perShardCollector.shardContext.index() != scorer.shardContext().index()) {
                 // TODO: share the bottom between shardCollectors
-                perShardCollector = new PerShardCollector(scorer.shardIndex(), scorer.searchContext(), sorts, limit);
+                perShardCollector = new PerShardCollector(scorer.shardContext(), sorts, limit);
             }
             var leafCollector = perShardCollector.getLeafCollector(scorer.leafReaderContext());
             scorer.scoreNextRange(leafCollector, scorer.leafReaderContext().reader().getLiveDocs(), maxPageSize);
@@ -170,7 +169,7 @@ private Page collect() {
         }
         if (scorer.isDone()) {
             var nextScorer = getCurrentOrLoadNextScorer();
-            if (nextScorer == null || nextScorer.shardIndex() != scorer.shardIndex()) {
+            if (nextScorer == null || nextScorer.shardContext().index() != scorer.shardContext().index()) {
                 return emit(true);
             }
         }
@@ -205,7 +204,7 @@ private Page emit(boolean startEmitting) {
         ) {
             int start = offset;
             offset += size;
-            List<LeafReaderContext> leafContexts = perShardCollector.searchContext.searcher().getLeafContexts();
+            List<LeafReaderContext> leafContexts = perShardCollector.shardContext.searcher().getLeafContexts();
             for (int i = start; i < offset; i++) {
                 int doc = scoreDocs[i].doc;
                 int segment = ReaderUtil.subIndex(doc, leafContexts);
@@ -213,7 +212,7 @@ private Page emit(boolean startEmitting) {
                 currentDocsBuilder.appendInt(doc - leafContexts.get(segment).docBase); // the offset inside the segment
             }
 
-            shard = blockFactory.newConstantIntBlockWith(perShardCollector.shardIndex, size);
+            shard = blockFactory.newConstantIntBlockWith(perShardCollector.shardContext.index(), size);
             segments = currentSegmentBuilder.build();
             docs = currentDocsBuilder.build();
             page = new Page(size, new DocVector(shard.asVector(), segments, docs, null).asBlock());
@@ -233,17 +232,15 @@ protected void describe(StringBuilder sb) {
     }
 
     static final class PerShardCollector {
-        private final int shardIndex;
-        private final SearchContext searchContext;
+        private final ShardContext shardContext;
         private final TopFieldCollector topFieldCollector;
         private int leafIndex;
         private LeafCollector leafCollector;
         private Thread currentThread;
 
-        PerShardCollector(int shardIndex, SearchContext searchContext, List<SortBuilder<?>> sorts, int limit) throws IOException {
-            this.shardIndex = shardIndex;
-            this.searchContext = searchContext;
-            Optional<SortAndFormats> sortAndFormats = SortBuilder.buildSort(sorts, searchContext.getSearchExecutionContext());
+        PerShardCollector(ShardContext shardContext, List<SortBuilder<?>> sorts, int limit) throws IOException {
+            this.shardContext = shardContext;
+            Optional<SortAndFormats> sortAndFormats = shardContext.buildSort(sorts);
             if (sortAndFormats.isEmpty()) {
                 throw new IllegalStateException("sorts must not be disabled in TopN");
             }
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ShardContext.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ShardContext.java
new file mode 100644
index 0000000000000..5bf6ac8532f48
--- /dev/null
+++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ShardContext.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.compute.lucene;
+
+import org.apache.lucene.search.IndexSearcher;
+import org.elasticsearch.search.sort.SortAndFormats;
+import org.elasticsearch.search.sort.SortBuilder;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Optional;
+
+/**
+ * Context of each shard we're operating against.
+ */
+public interface ShardContext {
+    /**
+     * The index of this shard in the list of shards being processed.
+     */
+    int index();
+
+    /**
+     * Get {@link IndexSearcher} holding the actual data.
+     */
+    IndexSearcher searcher();
+
+    /**
+     * Build a "sort" configuration from an Elasticsearch style builder.
+     */
+    Optional<SortAndFormats> buildSort(List<SortBuilder<?>> sorts) throws IOException;
+
+    /**
+     * A "name" for the shard that you can look up against other APIs like
+     * {@code _cat/shards}.
+     */
+    String shardIdentifier();
+}
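
For illustration only, a bare-bones implementation of the new interface over a plain Lucene reader could look like the sketch below (hypothetical class, assumed to live in the same package; it declines to build sorts, much like the test mock introduced later in this patch):

    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.search.IndexSearcher;
    import org.elasticsearch.search.sort.SortAndFormats;
    import org.elasticsearch.search.sort.SortBuilder;

    import java.util.List;
    import java.util.Optional;

    final class SimpleShardContext implements ShardContext {
        private final int index;
        private final IndexSearcher searcher;

        SimpleShardContext(int index, IndexReader reader) {
            this.index = index;
            this.searcher = new IndexSearcher(reader);
        }

        @Override
        public int index() {
            return index;
        }

        @Override
        public IndexSearcher searcher() {
            return searcher;
        }

        @Override
        public Optional<SortAndFormats> buildSort(List<SortBuilder<?>> sorts) {
            return Optional.empty(); // no sort support in this sketch
        }

        @Override
        public String shardIdentifier() {
            return "test-index:" + index;
        }
    }
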
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java
index 8415b3883ad3a..45a019328940c 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java
@@ -47,6 +47,8 @@
 import org.elasticsearch.compute.lucene.DataPartitioning;
 import org.elasticsearch.compute.lucene.LuceneOperator;
 import org.elasticsearch.compute.lucene.LuceneSourceOperator;
+import org.elasticsearch.compute.lucene.LuceneSourceOperatorTests;
+import org.elasticsearch.compute.lucene.ShardContext;
 import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator;
 import org.elasticsearch.compute.operator.AbstractPageMappingOperator;
 import org.elasticsearch.compute.operator.Driver;
@@ -65,7 +67,6 @@
 import org.elasticsearch.index.mapper.SourceLoader;
 import org.elasticsearch.index.mapper.Uid;
 import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
-import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.test.ESTestCase;
 
 import java.io.IOException;
@@ -79,7 +80,6 @@
 
 import static org.elasticsearch.compute.aggregation.AggregatorMode.FINAL;
 import static org.elasticsearch.compute.aggregation.AggregatorMode.INITIAL;
-import static org.elasticsearch.compute.lucene.LuceneSourceOperatorTests.mockSearchContext;
 import static org.elasticsearch.compute.operator.OperatorTestCase.randomPageSize;
 import static org.hamcrest.Matchers.contains;
 import static org.hamcrest.Matchers.empty;
@@ -345,7 +345,7 @@ public static void assertDriverContext(DriverContext driverContext) {
     }
 
     static LuceneOperator.Factory luceneOperatorFactory(IndexReader reader, Query query, int limit) {
-        final SearchContext searchContext = mockSearchContext(reader, 0);
+        final ShardContext searchContext = new LuceneSourceOperatorTests.MockShardContext(reader, 0);
         return new LuceneSourceOperator.Factory(
             List.of(searchContext),
             ctx -> query,
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java
index 8d401c2099b85..0c41cfc704f56 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java
@@ -26,7 +26,6 @@
 import org.elasticsearch.compute.operator.TestResultPageSinkOperator;
 import org.elasticsearch.core.IOUtils;
 import org.elasticsearch.indices.CrankyCircuitBreakerService;
-import org.elasticsearch.search.internal.SearchContext;
 import org.junit.After;
 
 import java.io.IOException;
@@ -38,7 +37,6 @@
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.lessThanOrEqualTo;
-import static org.mockito.Mockito.when;
 
 public class LuceneCountOperatorTests extends AnyOperatorTestCase {
     private Directory directory = newDirectory();
@@ -82,8 +80,7 @@ private LuceneCountOperator.Factory simple(DataPartitioning dataPartitioning, in
             throw new RuntimeException(e);
         }
 
-        SearchContext ctx = LuceneSourceOperatorTests.mockSearchContext(reader, 0);
-        when(ctx.getSearchExecutionContext().getIndexReader()).thenReturn(reader);
+        ShardContext ctx = new LuceneSourceOperatorTests.MockShardContext(reader, 0);
         final Query query;
         if (enableShortcut && randomBoolean()) {
             query = new MatchAllDocsQuery();
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java
index 19e16144e11c5..a4c6622344bea 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java
@@ -27,26 +27,19 @@
 import org.elasticsearch.compute.operator.OperatorTestCase;
 import org.elasticsearch.compute.operator.TestResultPageSinkOperator;
 import org.elasticsearch.core.IOUtils;
-import org.elasticsearch.index.Index;
 import org.elasticsearch.index.cache.query.TrivialQueryCachingPolicy;
-import org.elasticsearch.index.fielddata.FieldDataContext;
-import org.elasticsearch.index.fielddata.IndexFieldData;
-import org.elasticsearch.index.fielddata.IndexFieldDataCache;
 import org.elasticsearch.index.mapper.MappedFieldType;
-import org.elasticsearch.index.mapper.NestedLookup;
 import org.elasticsearch.index.mapper.NumberFieldMapper;
-import org.elasticsearch.index.query.SearchExecutionContext;
-import org.elasticsearch.index.query.support.NestedScope;
 import org.elasticsearch.indices.CrankyCircuitBreakerService;
-import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
 import org.elasticsearch.search.internal.ContextIndexSearcher;
-import org.elasticsearch.search.internal.SearchContext;
+import org.elasticsearch.search.sort.SortAndFormats;
+import org.elasticsearch.search.sort.SortBuilder;
 import org.junit.After;
 
 import java.io.IOException;
-import java.io.UncheckedIOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Optional;
 import java.util.function.Function;
 
 import static org.hamcrest.Matchers.both;
@@ -55,10 +48,6 @@
 import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.lessThan;
 import static org.hamcrest.Matchers.lessThanOrEqualTo;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.ArgumentMatchers.anyString;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
 
 public class LuceneSourceOperatorTests extends AnyOperatorTestCase {
     private static final MappedFieldType S_FIELD = new NumberFieldMapper.NumberFieldType("s", NumberFieldMapper.NumberType.LONG);
@@ -97,24 +86,8 @@ private LuceneSourceOperator.Factory simple(DataPartitioning dataPartitioning, i
             throw new RuntimeException(e);
         }
 
-        SearchContext ctx = mockSearchContext(reader, 0);
-        when(ctx.getSearchExecutionContext().getFieldType(anyString())).thenAnswer(inv -> {
-            String name = inv.getArgument(0);
-            return switch (name) {
-                case "s" -> S_FIELD;
-                default -> throw new IllegalArgumentException("don't support [" + name + "]");
-            };
-        });
-        when(ctx.getSearchExecutionContext().getForField(any(), any())).thenAnswer(inv -> {
-            MappedFieldType ft = inv.getArgument(0);
-            IndexFieldData.Builder builder = ft.fielddataBuilder(FieldDataContext.noRuntimeFields("test"));
-            // This breaker is for fielddata from text fields. We don't test it so it won't break not test not to use a breaker here.
-            return builder.build(new IndexFieldDataCache.None(), new NoneCircuitBreakerService());
-        });
-        when(ctx.getSearchExecutionContext().nestedScope()).thenReturn(new NestedScope());
-        when(ctx.getSearchExecutionContext().nestedLookup()).thenReturn(NestedLookup.EMPTY);
-        when(ctx.getSearchExecutionContext().getIndexReader()).thenReturn(reader);
-        Function<SearchContext, Query> queryFunction = c -> new MatchAllDocsQuery();
+        ShardContext ctx = new MockShardContext(reader, 0);
+        Function<ShardContext, Query> queryFunction = c -> new MatchAllDocsQuery();
         int maxPageSize = between(10, Math.max(10, numDocs));
         return new LuceneSourceOperator.Factory(List.of(ctx), queryFunction, dataPartitioning, 1, maxPageSize, limit);
     }
@@ -206,24 +179,43 @@ private void testSimple(DriverContext ctx, int size, int limit) {
      * Creates a mock search context with the given index reader.
      * The returned mock search context can be used to test with {@link LuceneOperator}.
      */
-    public static SearchContext mockSearchContext(IndexReader reader, int shardId) {
-        try {
-            ContextIndexSearcher searcher = new ContextIndexSearcher(
-                reader,
-                IndexSearcher.getDefaultSimilarity(),
-                IndexSearcher.getDefaultQueryCache(),
-                TrivialQueryCachingPolicy.NEVER,
-                true
-            );
-            SearchContext searchContext = mock(SearchContext.class);
-            when(searchContext.searcher()).thenReturn(searcher);
-            SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class);
-            when(searchContext.getSearchExecutionContext()).thenReturn(searchExecutionContext);
-            when(searchExecutionContext.getFullyQualifiedIndex()).thenReturn(new Index("test", "uid"));
-            when(searchExecutionContext.getShardId()).thenReturn(shardId);
-            return searchContext;
-        } catch (IOException e) {
-            throw new UncheckedIOException(e);
+    public static class MockShardContext implements ShardContext {
+        private final int index;
+        private final ContextIndexSearcher searcher;
+
+        public MockShardContext(IndexReader reader, int index) {
+            this.index = index;
+            try {
+                this.searcher = new ContextIndexSearcher(
+                    reader,
+                    IndexSearcher.getDefaultSimilarity(),
+                    IndexSearcher.getDefaultQueryCache(),
+                    TrivialQueryCachingPolicy.NEVER,
+                    true
+                );
+            } catch (IOException e) {
+                throw new AssertionError(e);
+            }
+        }
+
+        @Override
+        public int index() {
+            return index;
+        }
+
+        @Override
+        public IndexSearcher searcher() {
+            return searcher;
+        }
+
+        @Override
+        public Optional<SortAndFormats> buildSort(List<SortBuilder<?>> sorts) {
+            return Optional.empty();
+        }
+
+        @Override
+        public String shardIdentifier() {
+            return "test";
         }
     }
 }
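
MockShardContext replaces the Mockito-based mockSearchContext helper used before this patch. A hedged sketch of how a test might wire it into a source operator factory (the reader variable and the numeric arguments are assumptions, not values from this patch):

    // Assumes an open IndexReader named "reader".
    ShardContext ctx = new LuceneSourceOperatorTests.MockShardContext(reader, 0);
    LuceneOperator.Factory factory = new LuceneSourceOperator.Factory(
        List.of(ctx),
        c -> new MatchAllDocsQuery(), // query resolved per shard context
        DataPartitioning.SHARD,
        1,      // taskConcurrency
        100,    // maxPageSize
        10_000  // limit
    );
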
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java
index 5776c45274ad1..57f3dd5412ca1 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java
@@ -13,6 +13,10 @@
 import org.apache.lucene.index.NoMergePolicy;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.SortField;
+import org.apache.lucene.search.SortedNumericSelector;
+import org.apache.lucene.search.SortedNumericSortField;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.tests.index.RandomIndexWriter;
 import org.elasticsearch.common.breaker.CircuitBreakingException;
@@ -26,30 +30,23 @@
 import org.elasticsearch.compute.operator.OperatorTestCase;
 import org.elasticsearch.compute.operator.TestResultPageSinkOperator;
 import org.elasticsearch.core.IOUtils;
-import org.elasticsearch.index.fielddata.FieldDataContext;
-import org.elasticsearch.index.fielddata.IndexFieldData;
-import org.elasticsearch.index.fielddata.IndexFieldDataCache;
 import org.elasticsearch.index.mapper.MappedFieldType;
-import org.elasticsearch.index.mapper.NestedLookup;
 import org.elasticsearch.index.mapper.NumberFieldMapper;
-import org.elasticsearch.index.query.support.NestedScope;
 import org.elasticsearch.indices.CrankyCircuitBreakerService;
-import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
-import org.elasticsearch.search.internal.SearchContext;
+import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.sort.FieldSortBuilder;
+import org.elasticsearch.search.sort.SortAndFormats;
 import org.elasticsearch.search.sort.SortBuilder;
 import org.junit.After;
 
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Optional;
 import java.util.function.Function;
 
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.hasSize;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.ArgumentMatchers.anyString;
-import static org.mockito.Mockito.when;
 
 public class LuceneTopNSourceOperatorTests extends AnyOperatorTestCase {
     private static final MappedFieldType S_FIELD = new NumberFieldMapper.NumberFieldType("s", NumberFieldMapper.NumberType.LONG);
@@ -88,24 +85,14 @@ private LuceneTopNSourceOperator.Factory simple(DataPartitioning dataPartitionin
             throw new RuntimeException(e);
         }
 
-        SearchContext ctx = LuceneSourceOperatorTests.mockSearchContext(reader, 0);
-        when(ctx.getSearchExecutionContext().getFieldType(anyString())).thenAnswer(inv -> {
-            String name = inv.getArgument(0);
-            return switch (name) {
-                case "s" -> S_FIELD;
-                default -> throw new IllegalArgumentException("don't support [" + name + "]");
-            };
-        });
-        when(ctx.getSearchExecutionContext().getForField(any(), any())).thenAnswer(inv -> {
-            MappedFieldType ft = inv.getArgument(0);
-            IndexFieldData.Builder builder = ft.fielddataBuilder(FieldDataContext.noRuntimeFields("test"));
-            // This breaker is used for fielddata but we're not testing that.
-            return builder.build(new IndexFieldDataCache.None(), new NoneCircuitBreakerService());
-        });
-        when(ctx.getSearchExecutionContext().nestedScope()).thenReturn(new NestedScope());
-        when(ctx.getSearchExecutionContext().nestedLookup()).thenReturn(NestedLookup.EMPTY);
-        when(ctx.getSearchExecutionContext().getIndexReader()).thenReturn(reader);
-        Function<SearchContext, Query> queryFunction = c -> new MatchAllDocsQuery();
+        ShardContext ctx = new LuceneSourceOperatorTests.MockShardContext(reader, 0) {
+            @Override
+            public Optional<SortAndFormats> buildSort(List<SortBuilder<?>> sorts) {
+                SortField field = new SortedNumericSortField("s", SortField.Type.LONG, false, SortedNumericSelector.Type.MIN);
+                return Optional.of(new SortAndFormats(new Sort(field), new DocValueFormat[] { null }));
+            }
+        };
+        Function<ShardContext, Query> queryFunction = c -> new MatchAllDocsQuery();
         int taskConcurrency = 0;
         int maxPageSize = between(10, Math.max(10, size));
         List> sorts = List.of(new FieldSortBuilder("s"));
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java
index ada0582a2fad8..330a7293a9a67 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java
@@ -65,7 +65,6 @@
 import org.elasticsearch.index.mapper.TextFieldMapper;
 import org.elasticsearch.index.mapper.TextSearchInfo;
 import org.elasticsearch.index.mapper.TsidExtractingIdFieldMapper;
-import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.lookup.SearchLookup;
 import org.elasticsearch.xcontent.XContentBuilder;
 import org.elasticsearch.xcontent.XContentType;
@@ -86,7 +85,6 @@
 import java.util.stream.Collectors;
 import java.util.stream.IntStream;
 
-import static org.elasticsearch.compute.lucene.LuceneSourceOperatorTests.mockSearchContext;
 import static org.elasticsearch.test.MapMatcher.assertMap;
 import static org.elasticsearch.test.MapMatcher.matchesMap;
 import static org.hamcrest.Matchers.equalTo;
@@ -164,7 +162,7 @@ private SourceOperator simpleInput(DriverContext context, int size, int commitEv
             throw new RuntimeException(e);
         }
         var luceneFactory = new LuceneSourceOperator.Factory(
-            List.of(mockSearchContext(reader, 0)),
+            List.of(new LuceneSourceOperatorTests.MockShardContext(reader, 0)),
             ctx -> new MatchAllDocsQuery(),
             DataPartitioning.SHARD,
             randomIntBetween(1, 10),
@@ -1268,7 +1266,7 @@ public void testWithNulls() throws IOException {
 
         DriverContext driverContext = driverContext();
         var luceneFactory = new LuceneSourceOperator.Factory(
-            List.of(mockSearchContext(reader, 0)),
+            List.of(new LuceneSourceOperatorTests.MockShardContext(reader, 0)),
             ctx -> new MatchAllDocsQuery(),
             randomFrom(DataPartitioning.values()),
             randomIntBetween(1, 10),
@@ -1483,10 +1481,10 @@ public void testManyShards() throws IOException {
                 closeMe[d * 2 + 1] = dirs[d] = newDirectory();
                 closeMe[d * 2] = readers[d] = initIndex(dirs[d], size, between(10, size * 2));
             }
-            List<SearchContext> contexts = new ArrayList<>();
+            List<ShardContext> contexts = new ArrayList<>();
             List<ValuesSourceReaderOperator.ShardContext> readerShardContexts = new ArrayList<>();
             for (int s = 0; s < shardCount; s++) {
-                contexts.add(mockSearchContext(readers[s], s));
+                contexts.add(new LuceneSourceOperatorTests.MockShardContext(readers[s], s));
                 readerShardContexts.add(new ValuesSourceReaderOperator.ShardContext(readers[s], () -> SourceLoader.FROM_STORED_SOURCE));
             }
             var luceneFactory = new LuceneSourceOperator.Factory(
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java
index a533c373ad2ca..789c15ee156ea 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java
@@ -31,7 +31,6 @@
 import org.elasticsearch.compute.data.ElementType;
 import org.elasticsearch.compute.data.LocalCircuitBreaker;
 import org.elasticsearch.compute.data.Page;
-import org.elasticsearch.compute.lucene.BlockReaderFactories;
 import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator;
 import org.elasticsearch.compute.operator.Driver;
 import org.elasticsearch.compute.operator.DriverContext;
@@ -74,6 +73,7 @@
 import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput;
+import org.elasticsearch.xpack.esql.planner.EsPhysicalOperationProviders;
 import org.elasticsearch.xpack.esql.planner.PlannerUtils;
 import org.elasticsearch.xpack.esql.plugin.EsqlPlugin;
 import org.elasticsearch.xpack.esql.type.EsqlDataTypes;
@@ -274,8 +274,12 @@ private void doLookup(
                 NamedExpression extractField = extractFields.get(i);
                 final ElementType elementType = PlannerUtils.toElementType(extractField.dataType());
                 mergingTypes[i] = elementType;
-                BlockLoader loader = BlockReaderFactories.loader(
+                EsPhysicalOperationProviders.ShardContext ctx = new EsPhysicalOperationProviders.DefaultShardContext(
+                    0,
                     searchContext.getSearchExecutionContext(),
+                    searchContext.request().getAliasFilter()
+                );
+                BlockLoader loader = ctx.blockLoader(
                     extractField instanceof Alias a ? ((NamedExpression) a.child()).name() : extractField.name(),
                     EsqlDataTypes.isUnsupported(extractField.dataType())
                 );
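
The enrich lookup now resolves block loaders through the same per-shard abstraction as the planner. A hedged sketch of the two-step path (the field name is an assumption; index 0 is used because the lookup runs against a single shard):

    EsPhysicalOperationProviders.DefaultShardContext shardCtx = new EsPhysicalOperationProviders.DefaultShardContext(
        0,                                         // shard index within this lookup
        searchContext.getSearchExecutionContext(), // per-shard mappings and search state
        searchContext.request().getAliasFilter()   // alias filter honoured by toQuery(...)
    );
    BlockLoader loader = shardCtx.blockLoader("user.name", false);
    // Unsupported or unmapped fields fall back to BlockLoader.CONSTANT_NULLS.
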
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java
index b324cf7c4056a..43d02a00c4db4 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java
@@ -9,24 +9,32 @@
 
 import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
+import org.elasticsearch.common.logging.HeaderWarning;
 import org.elasticsearch.compute.aggregation.GroupingAggregator;
+import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.ElementType;
-import org.elasticsearch.compute.lucene.BlockReaderFactories;
+import org.elasticsearch.compute.lucene.LuceneCountOperator;
 import org.elasticsearch.compute.lucene.LuceneOperator;
 import org.elasticsearch.compute.lucene.LuceneSourceOperator;
 import org.elasticsearch.compute.lucene.LuceneTopNSourceOperator;
 import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator;
 import org.elasticsearch.compute.operator.Operator;
 import org.elasticsearch.compute.operator.OrdinalsGroupingOperator;
+import org.elasticsearch.compute.operator.SourceOperator;
 import org.elasticsearch.index.mapper.BlockLoader;
+import org.elasticsearch.index.mapper.FieldNamesFieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.NestedLookup;
+import org.elasticsearch.index.mapper.SourceLoader;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.index.query.SearchExecutionContext;
 import org.elasticsearch.index.search.NestedHelper;
 import org.elasticsearch.search.internal.AliasFilter;
-import org.elasticsearch.search.internal.SearchContext;
+import org.elasticsearch.search.lookup.SearchLookup;
+import org.elasticsearch.search.sort.SortAndFormats;
 import org.elasticsearch.search.sort.SortBuilder;
 import org.elasticsearch.xpack.esql.plan.physical.AggregateExec;
 import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec;
@@ -37,10 +45,14 @@
 import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner.PhysicalOperation;
 import org.elasticsearch.xpack.esql.type.EsqlDataTypes;
 import org.elasticsearch.xpack.ql.expression.Attribute;
+import org.elasticsearch.xpack.ql.expression.Expression;
 import org.elasticsearch.xpack.ql.type.DataType;
 
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Optional;
+import java.util.Set;
 import java.util.function.Function;
 import java.util.function.IntFunction;
 
@@ -48,15 +60,30 @@
 import static org.elasticsearch.compute.lucene.LuceneSourceOperator.NO_LIMIT;
 
 public class EsPhysicalOperationProviders extends AbstractPhysicalOperationProviders {
+    /**
+     * Context of each shard we're operating against.
+     */
+    public interface ShardContext extends org.elasticsearch.compute.lucene.ShardContext {
+        /**
+         * Build something to load source {@code _source}.
+         */
+        SourceLoader newSourceLoader();
 
-    private final List<SearchContext> searchContexts;
+        /**
+         * Convert a {@link QueryBuilder} into a real {@link Query lucene query}.
+         */
+        Query toQuery(QueryBuilder queryBuilder);
 
-    public EsPhysicalOperationProviders(List<SearchContext> searchContexts) {
-        this.searchContexts = searchContexts;
+        /**
+         * Returns something to load values from this field into a {@link Block}.
+         */
+        BlockLoader blockLoader(String name, boolean asUnsupportedSource);
     }
 
-    public List<SearchContext> searchContexts() {
-        return searchContexts;
+    private final List<ShardContext> shardContexts;
+
+    public EsPhysicalOperationProviders(List<ShardContext> shardContexts) {
+        this.shardContexts = shardContexts;
     }
 
     @Override
@@ -66,7 +93,7 @@ public final PhysicalOperation fieldExtractPhysicalOperation(FieldExtractExec fi
         // to GeoPointFieldMapper.blockLoader
         Layout.Builder layout = source.layout.builder();
         var sourceAttr = fieldExtractExec.sourceAttribute();
-        List<ValuesSourceReaderOperator.ShardContext> readers = searchContexts.stream()
+        List<ValuesSourceReaderOperator.ShardContext> readers = shardContexts.stream()
             .map(s -> new ValuesSourceReaderOperator.ShardContext(s.searcher().getIndexReader(), s::newSourceLoader))
             .toList();
         List fields = new ArrayList<>();
@@ -77,46 +104,19 @@ public final PhysicalOperation fieldExtractPhysicalOperation(FieldExtractExec fi
             ElementType elementType = PlannerUtils.toElementType(dataType);
             String fieldName = attr.name();
             boolean isSupported = EsqlDataTypes.isUnsupported(dataType);
-            IntFunction<BlockLoader> loader = s -> BlockReaderFactories.loader(
-                searchContexts.get(s).getSearchExecutionContext(),
-                fieldName,
-                isSupported
-            );
+            IntFunction<BlockLoader> loader = s -> shardContexts.get(s).blockLoader(fieldName, isSupported);
             fields.add(new ValuesSourceReaderOperator.FieldInfo(fieldName, elementType, loader));
         }
         return source.with(new ValuesSourceReaderOperator.Factory(fields, readers, docChannel), layout.build());
     }
 
-    public static Function<SearchContext, Query> querySupplier(QueryBuilder queryBuilder) {
-        final QueryBuilder qb = queryBuilder == null ? QueryBuilders.matchAllQuery() : queryBuilder;
-
-        return searchContext -> {
-            SearchExecutionContext ctx = searchContext.getSearchExecutionContext();
-            Query query = ctx.toQuery(qb).query();
-            NestedLookup nestedLookup = ctx.nestedLookup();
-            if (nestedLookup != NestedLookup.EMPTY) {
-                NestedHelper nestedHelper = new NestedHelper(nestedLookup, ctx::isFieldMapped);
-                if (nestedHelper.mightMatchNestedDocs(query)) {
-                    // filter out nested documents
-                    query = new BooleanQuery.Builder().add(query, BooleanClause.Occur.MUST)
-                        .add(newNonNestedFilter(ctx.indexVersionCreated()), BooleanClause.Occur.FILTER)
-                        .build();
-                }
-            }
-            AliasFilter aliasFilter = searchContext.request().getAliasFilter();
-            if (aliasFilter != AliasFilter.EMPTY) {
-                Query filterQuery = ctx.toQuery(aliasFilter.getQueryBuilder()).query();
-                query = new BooleanQuery.Builder().add(query, BooleanClause.Occur.MUST)
-                    .add(filterQuery, BooleanClause.Occur.FILTER)
-                    .build();
-            }
-            return query;
-        };
+    public Function<org.elasticsearch.compute.lucene.ShardContext, Query> querySupplier(QueryBuilder builder) {
+        QueryBuilder qb = builder == null ? QueryBuilders.matchAllQuery() : builder;
+        return ctx -> shardContexts.get(ctx.index()).toQuery(qb);
     }
 
     @Override
     public final PhysicalOperation sourcePhysicalOperation(EsQueryExec esQueryExec, LocalExecutionPlannerContext context) {
-        Function<SearchContext, Query> querySupplier = querySupplier(esQueryExec.query());
         final LuceneOperator.Factory luceneFactory;
 
         List sorts = esQueryExec.sorts();
@@ -130,8 +130,8 @@ public final PhysicalOperation sourcePhysicalOperation(EsQueryExec esQueryExec,
                 fieldSorts.add(sort.fieldSortBuilder());
             }
             luceneFactory = new LuceneTopNSourceOperator.Factory(
-                searchContexts,
-                querySupplier,
+                shardContexts,
+                querySupplier(esQueryExec.query()),
                 context.queryPragmas().dataPartitioning(),
                 context.queryPragmas().taskConcurrency(),
                 context.pageSize(rowEstimatedSize),
@@ -140,8 +140,8 @@ public final PhysicalOperation sourcePhysicalOperation(EsQueryExec esQueryExec,
             );
         } else {
             luceneFactory = new LuceneSourceOperator.Factory(
-                searchContexts,
-                querySupplier,
+                shardContexts,
+                querySupplier(esQueryExec.query()),
                 context.queryPragmas().dataPartitioning(),
                 context.queryPragmas().taskConcurrency(),
                 context.pageSize(rowEstimatedSize),
@@ -155,6 +155,19 @@ public final PhysicalOperation sourcePhysicalOperation(EsQueryExec esQueryExec,
         return PhysicalOperation.fromSource(luceneFactory, layout.build());
     }
 
+    /**
+     * Build a {@link SourceOperator.SourceOperatorFactory} that counts documents in the search index.
+     */
+    public LuceneCountOperator.Factory countSource(LocalExecutionPlannerContext context, QueryBuilder queryBuilder, Expression limit) {
+        return new LuceneCountOperator.Factory(
+            shardContexts,
+            querySupplier(queryBuilder),
+            context.queryPragmas().dataPartitioning(),
+            context.queryPragmas().taskConcurrency(),
+            limit == null ? NO_LIMIT : (Integer) limit.fold()
+        );
+    }
+
     @Override
     public final Operator.OperatorFactory ordinalGroupingOperatorFactory(
         LocalExecutionPlanner.PhysicalOperation source,
@@ -166,19 +179,15 @@ public final Operator.OperatorFactory ordinalGroupingOperatorFactory(
     ) {
         var sourceAttribute = FieldExtractExec.extractSourceAttributesFrom(aggregateExec.child());
         int docChannel = source.layout.get(sourceAttribute.id()).channel();
-        List<ValuesSourceReaderOperator.ShardContext> shardContexts = searchContexts.stream()
+        List<ValuesSourceReaderOperator.ShardContext> vsShardContexts = shardContexts.stream()
             .map(s -> new ValuesSourceReaderOperator.ShardContext(s.searcher().getIndexReader(), s::newSourceLoader))
             .toList();
         // The grouping-by values are ready, let's group on them directly.
         // Costin: why are they ready and not already exposed in the layout?
         boolean isUnsupported = EsqlDataTypes.isUnsupported(attrSource.dataType());
         return new OrdinalsGroupingOperator.OrdinalsGroupingOperatorFactory(
-            shardIdx -> BlockReaderFactories.loader(
-                searchContexts.get(shardIdx).getSearchExecutionContext(),
-                attrSource.name(),
-                isUnsupported
-            ),
-            shardContexts,
+            shardIdx -> shardContexts.get(shardIdx).blockLoader(attrSource.name(), isUnsupported),
+            vsShardContexts,
             groupElementType,
             docChannel,
             attrSource.name(),
@@ -186,4 +195,107 @@ public final Operator.OperatorFactory ordinalGroupingOperatorFactory(
             context.pageSize(aggregateExec.estimatedRowSize())
         );
     }
+
+    public static class DefaultShardContext implements ShardContext {
+        private final int index;
+        private final SearchExecutionContext ctx;
+        private final AliasFilter aliasFilter;
+
+        public DefaultShardContext(int index, SearchExecutionContext ctx, AliasFilter aliasFilter) {
+            this.index = index;
+            this.ctx = ctx;
+            this.aliasFilter = aliasFilter;
+        }
+
+        @Override
+        public int index() {
+            return index;
+        }
+
+        @Override
+        public IndexSearcher searcher() {
+            return ctx.searcher();
+        }
+
+        @Override
+        public Optional<SortAndFormats> buildSort(List<SortBuilder<?>> sorts) throws IOException {
+            return SortBuilder.buildSort(sorts, ctx);
+        }
+
+        @Override
+        public String shardIdentifier() {
+            return ctx.getFullyQualifiedIndex().getName() + ":" + ctx.getShardId();
+        }
+
+        @Override
+        public SourceLoader newSourceLoader() {
+            return ctx.newSourceLoader(false);
+        }
+
+        @Override
+        public Query toQuery(QueryBuilder queryBuilder) {
+            Query query = ctx.toQuery(queryBuilder).query();
+            NestedLookup nestedLookup = ctx.nestedLookup();
+            if (nestedLookup != NestedLookup.EMPTY) {
+                NestedHelper nestedHelper = new NestedHelper(nestedLookup, ctx::isFieldMapped);
+                if (nestedHelper.mightMatchNestedDocs(query)) {
+                    // filter out nested documents
+                    query = new BooleanQuery.Builder().add(query, BooleanClause.Occur.MUST)
+                        .add(newNonNestedFilter(ctx.indexVersionCreated()), BooleanClause.Occur.FILTER)
+                        .build();
+                }
+            }
+            if (aliasFilter != AliasFilter.EMPTY) {
+                Query filterQuery = ctx.toQuery(aliasFilter.getQueryBuilder()).query();
+                query = new BooleanQuery.Builder().add(query, BooleanClause.Occur.MUST)
+                    .add(filterQuery, BooleanClause.Occur.FILTER)
+                    .build();
+            }
+            return query;
+        }
+
+        @Override
+        public BlockLoader blockLoader(String name, boolean asUnsupportedSource) {
+            if (asUnsupportedSource) {
+                return BlockLoader.CONSTANT_NULLS;
+            }
+            MappedFieldType fieldType = ctx.getFieldType(name);
+            if (fieldType == null) {
+                // the field does not exist in this context
+                return BlockLoader.CONSTANT_NULLS;
+            }
+            BlockLoader loader = fieldType.blockLoader(new MappedFieldType.BlockLoaderContext() {
+                @Override
+                public String indexName() {
+                    return ctx.getFullyQualifiedIndex().getName();
+                }
+
+                @Override
+                public SearchLookup lookup() {
+                    return ctx.lookup();
+                }
+
+                @Override
+                public Set<String> sourcePaths(String name) {
+                    return ctx.sourcePath(name);
+                }
+
+                @Override
+                public String parentField(String field) {
+                    return ctx.parentPath(field);
+                }
+
+                @Override
+                public FieldNamesFieldMapper.FieldNamesFieldType fieldNames() {
+                    return (FieldNamesFieldMapper.FieldNamesFieldType) ctx.lookup().fieldType(FieldNamesFieldMapper.NAME);
+                }
+            });
+            if (loader == null) {
+                HeaderWarning.addWarning("Field [{}] cannot be retrieved, it is unsupported or not indexed; returning null", name);
+                return BlockLoader.CONSTANT_NULLS;
+            }
+
+            return loader;
+        }
+    }
 }
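
Query translation is now a per-shard concern: querySupplier looks up the right ShardContext by index and delegates to its toQuery, which already folds in the nested-document and alias filters shown above. A hedged usage sketch (the providers instance, the shard context list, and the query are assumptions):

    // Assumes "providers" wraps a list of DefaultShardContext instances named "shardContexts".
    var perShardQuery = providers.querySupplier(QueryBuilders.termQuery("host", "alpha"));
    Query q0 = perShardQuery.apply(shardContexts.get(0)); // nested and alias filters already applied
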
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java
index 15aec4545e7e7..d79becfc8a736 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java
@@ -7,7 +7,6 @@
 
 package org.elasticsearch.xpack.esql.planner;
 
-import org.apache.lucene.search.Query;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.util.iterable.Iterables;
@@ -17,7 +16,6 @@
 import org.elasticsearch.compute.data.ElementType;
 import org.elasticsearch.compute.data.LocalCircuitBreaker;
 import org.elasticsearch.compute.data.Page;
-import org.elasticsearch.compute.lucene.LuceneCountOperator;
 import org.elasticsearch.compute.lucene.LuceneOperator;
 import org.elasticsearch.compute.operator.ColumnExtractOperator;
 import org.elasticsearch.compute.operator.Driver;
@@ -49,7 +47,6 @@
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.logging.LogManager;
 import org.elasticsearch.logging.Logger;
-import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.tasks.CancellableTask;
 import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException;
 import org.elasticsearch.xpack.esql.enrich.EnrichLookupOperator;
@@ -102,7 +99,6 @@
 
 import static java.util.Arrays.asList;
 import static java.util.stream.Collectors.joining;
-import static org.elasticsearch.compute.lucene.LuceneOperator.NO_LIMIT;
 import static org.elasticsearch.compute.operator.LimitOperator.Factory;
 import static org.elasticsearch.compute.operator.ProjectOperator.ProjectOperatorFactory;
 
@@ -253,17 +249,7 @@ private PhysicalOperation planEsStats(EsStatsQueryExec statsQuery, LocalExecutio
         EsStatsQueryExec.Stat stat = statsQuery.stats().get(0);
 
         EsPhysicalOperationProviders esProvider = (EsPhysicalOperationProviders) physicalOperationProviders;
-        Function<SearchContext, Query> querySupplier = EsPhysicalOperationProviders.querySupplier(stat.filter(statsQuery.query()));
-
-        Expression limitExp = statsQuery.limit();
-        int limit = limitExp != null ? (Integer) limitExp.fold() : NO_LIMIT;
-        final LuceneOperator.Factory luceneFactory = new LuceneCountOperator.Factory(
-            esProvider.searchContexts(),
-            querySupplier,
-            context.queryPragmas.dataPartitioning(),
-            context.queryPragmas.taskConcurrency(),
-            limit
-        );
+        final LuceneOperator.Factory luceneFactory = esProvider.countSource(context, stat.filter(statsQuery.query()), statsQuery.limit());
 
         Layout.Builder layout = new Layout.Builder();
         layout.append(statsQuery.outputSet());
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java
index aa1eafbf90265..e781ed4a60c35 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java
@@ -363,6 +363,17 @@ private ActionListener cancelOnFailure(CancellableTask task, AtomicBoolean
 
     void runCompute(CancellableTask task, ComputeContext context, PhysicalPlan plan, ActionListener<List<DriverProfile>> listener) {
         listener = ActionListener.runAfter(listener, () -> Releasables.close(context.searchContexts));
+        List<EsPhysicalOperationProviders.ShardContext> contexts = new ArrayList<>(context.searchContexts.size());
+        for (int i = 0; i < context.searchContexts.size(); i++) {
+            SearchContext searchContext = context.searchContexts.get(i);
+            contexts.add(
+                new EsPhysicalOperationProviders.DefaultShardContext(
+                    i,
+                    searchContext.getSearchExecutionContext(),
+                    searchContext.request().getAliasFilter()
+                )
+            );
+        }
         final List<Driver> drivers;
         try {
             LocalExecutionPlanner planner = new LocalExecutionPlanner(
@@ -375,7 +386,7 @@ void runCompute(CancellableTask task, ComputeContext context, PhysicalPlan plan,
                 context.exchangeSource(),
                 context.exchangeSink(),
                 enrichLookupService,
-                new EsPhysicalOperationProviders(context.searchContexts)
+                new EsPhysicalOperationProviders(contexts)
             );
 
             LOGGER.debug("Received physical plan:\n{}", plan);
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java
index 27a45e71a69c1..3ac1453e6ad8f 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java
@@ -28,8 +28,6 @@
 import org.elasticsearch.index.cache.query.TrivialQueryCachingPolicy;
 import org.elasticsearch.index.mapper.MapperServiceTestCase;
 import org.elasticsearch.search.internal.ContextIndexSearcher;
-import org.elasticsearch.search.internal.SearchContext;
-import org.elasticsearch.test.TestSearchContext;
 import org.elasticsearch.xpack.esql.TestBlockFactory;
 import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec;
 import org.elasticsearch.xpack.esql.plugin.EsqlPlugin;
@@ -150,7 +148,7 @@ private EsqlConfiguration config() {
 
     private EsPhysicalOperationProviders esPhysicalOperationProviders() throws IOException {
         int numShards = randomIntBetween(1, 1000);
-        List<SearchContext> searchContexts = new ArrayList<>(numShards);
+        List<EsPhysicalOperationProviders.ShardContext> shardContexts = new ArrayList<>(numShards);
         var searcher = new ContextIndexSearcher(
             reader(),
             IndexSearcher.getDefaultSimilarity(),
@@ -159,12 +157,16 @@ private EsPhysicalOperationProviders esPhysicalOperationProviders() throws IOExc
             true
         );
         for (int i = 0; i < numShards; i++) {
-            searchContexts.add(
-                new TestSearchContext(createSearchExecutionContext(createMapperService(mapping(b -> {})), searcher), null, searcher)
+            shardContexts.add(
+                new EsPhysicalOperationProviders.DefaultShardContext(
+                    i,
+                    createSearchExecutionContext(createMapperService(mapping(b -> {})), searcher),
+                    null
+                )
             );
         }
-        releasables.addAll(searchContexts);
-        return new EsPhysicalOperationProviders(searchContexts);
+        releasables.add(searcher);
+        return new EsPhysicalOperationProviders(shardContexts);
     }
 
     private IndexReader reader() {